Mirror of https://github.com/Alfresco/alfresco-community-repo.git, synced 2025-10-08 14:51:49 +00:00
Merged 5.1.N (5.1.2) to 5.2.N (5.2.1)
    125605 rmunteanu: Merged 5.1.1 (5.1.1) to 5.1.N (5.1.2)
        125498 slanglois: MNT-16155 Update source headers - remove svn:eol-style property on Java and JSP source files

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/BRANCHES/DEV/5.2.N/root@125783 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -1,24 +1,24 @@
package org.alfresco.repo.admin;

import java.util.Collections;
import java.util.List;

import org.alfresco.service.cmr.repository.StoreRef;

/**
 * @author Andy
 *
 */
public class DummyIndexConfigurationCheckerImpl implements IndexConfigurationChecker
{

    /* (non-Javadoc)
     * @see org.alfresco.repo.admin.IndexConfigurationChecker#checkIndexConfiguration()
     */
    @Override
    public List<StoreRef> checkIndexConfiguration()
    {
        return Collections.<StoreRef>emptyList();
    }

}
@@ -1,18 +1,18 @@
package org.alfresco.repo.admin;

import java.util.List;

import org.alfresco.service.cmr.repository.StoreRef;

/**
 * @author Andy
 *
 */
public interface IndexConfigurationChecker
{
    /**
     * Check that the index contains root entries for all the stores that would be expected
     * @return - the stores with missing indexes
     */
    public List<StoreRef> checkIndexConfiguration();
}
@@ -1,128 +1,128 @@
package org.alfresco.repo.admin;

import java.io.File;
import java.util.List;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationEvent;
import org.springframework.extensions.surf.util.AbstractLifecycleBean;
import org.springframework.extensions.surf.util.I18NUtil;

public class IndexConfigurationCheckerBootstrapBean extends AbstractLifecycleBean
{
    private static Log logger = LogFactory.getLog(IndexConfigurationCheckerBootstrapBean.class);

    private IndexConfigurationChecker indexConfigurationChecker;

    private TransactionService transactionService;

    private boolean strict;

    private String dirRoot;

    @Override
    protected void onBootstrap(ApplicationEvent event)
    {
        RetryingTransactionCallback<Object> checkWork = new RetryingTransactionCallback<Object>()
        {
            public Object execute() throws Exception
            {
                // reindex
                logger.info("Checking/Recovering indexes ...");
                check();

                return null;
            }
        };
        transactionService.getRetryingTransactionHelper().doInTransaction(checkWork, true);
    }

    private void check()
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("Starting index configuration check: " + this);
        }

        File dirRootFile = new File(dirRoot);

        List<StoreRef> missingIndexStoreRefs = indexConfigurationChecker.checkIndexConfiguration();

        // check for missing indexes
        int missingStoreIndexes = missingIndexStoreRefs.size();
        if (missingStoreIndexes > 0)
        {
            String msg = I18NUtil.getMessage(ConfigurationChecker.ERR_MISSING_INDEXES, missingStoreIndexes);
            logger.error(msg);
            String msgRecover = I18NUtil.getMessage(ConfigurationChecker.MSG_HOWTO_INDEX_RECOVER);
            logger.info(msgRecover);
        }

        // handle either content or indexes missing
        if (missingStoreIndexes > 0)
        {
            String msg = I18NUtil.getMessage(ConfigurationChecker.ERR_FIX_DIR_ROOT, dirRootFile);
            logger.error(msg);

            // Now determine the failure behaviour
            if (strict)
            {
                throw new AlfrescoRuntimeException(msg);
            }
            else
            {
                String warn = I18NUtil.getMessage(ConfigurationChecker.WARN_STARTING_WITH_ERRORS);
                logger.warn(warn);
            }
        }
    }

    @Override
    protected void onShutdown(ApplicationEvent event)
    {
        // Nothing to do
    }

    public IndexConfigurationChecker getIndexConfigurationChecker()
    {
        return indexConfigurationChecker;
    }

    public void setIndexConfigurationChecker(IndexConfigurationChecker indexConfigurationChecker)
    {
        this.indexConfigurationChecker = indexConfigurationChecker;
    }

    public void setStrict(boolean strict)
    {
        this.strict = strict;
    }

    public void setDirRoot(String dirRoot)
    {
        this.dirRoot = dirRoot;
    }

    public void setTransactionService(TransactionService transactionService)
    {
        this.transactionService = transactionService;
    }

}
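
Note (editor's illustration, not part of this commit): onBootstrap() above uses Alfresco's retrying-transaction idiom. As a standalone sketch, assuming the same injected transactionService and indexConfigurationChecker fields as the bean above, the callback can also return a value instead of calling check() for its side effects:

// Illustration only -- not part of this commit; assumes the fields injected into the bean above.
RetryingTransactionCallback<List<StoreRef>> checkWork = new RetryingTransactionCallback<List<StoreRef>>()
{
    public List<StoreRef> execute() throws Exception
    {
        // runs inside a transaction that the helper retries on transient failures
        return indexConfigurationChecker.checkIndexConfiguration();
    }
};
// the second argument marks the transaction as read-only, as in onBootstrap() above
List<StoreRef> missing = transactionService.getRetryingTransactionHelper().doInTransaction(checkWork, true);
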
@@ -1,176 +1,176 @@
package org.alfresco.repo.admin;

import java.util.ArrayList;
import java.util.List;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.node.index.FullIndexRecoveryComponent.RecoveryMode;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.InvalidStoreRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;

/**
 * @author Andy
 *
 */
public class IndexConfigurationCheckerImpl implements IndexConfigurationChecker
{
    private static Log logger = LogFactory.getLog(IndexConfigurationCheckerImpl.class);

    private static final String ERR_DUPLICATE_ROOT_NODE = "system.config_check.err.indexes.duplicate_root_node";

    private RecoveryMode indexRecoveryMode;
    private NodeService nodeService;
    private SearchService searchService;

    /**
     * Set the index recovery mode
     * @param indexRecoveryMode RecoveryMode
     */
    public void setIndexRecoveryMode(RecoveryMode indexRecoveryMode)
    {
        this.indexRecoveryMode = indexRecoveryMode;
    }

    /**
     * Set the node service
     * @param nodeService NodeService
     */
    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    /**
     * Set the search service
     * @param searchService SearchService
     */
    public void setSearchService(SearchService searchService)
    {
        this.searchService = searchService;
    }

    @Override
    public List<StoreRef> checkIndexConfiguration()
    {
        // get all root nodes from the NodeService, i.e. database
        List<StoreRef> storeRefs = nodeService.getStores();
        List<StoreRef> missingIndexStoreRefs = new ArrayList<StoreRef>(0);
        for (StoreRef storeRef : storeRefs)
        {
            NodeRef rootNodeRef = null;
            try
            {
                rootNodeRef = nodeService.getRootNode(storeRef);
            }
            catch (InvalidStoreRefException e)
            {
                // the store is invalid and will therefore not have a root node entry
                continue;
            }

            // Are we creating the store - in which case we do not check
            // See MNT-11612
            int countChildAssoc = 0;
            if (storeRef.getProtocol().equals(StoreRef.PROTOCOL_AVM))
            {
                // AVM does not support nodeService.countChildAssocs()
                long start = 0;
                if (logger.isDebugEnabled())
                {
                    logger.debug("Counting childAssocs for store: " + storeRef);
                    start = System.currentTimeMillis();
                }
                List<ChildAssociationRef> childAssocs = nodeService.getChildAssocs(rootNodeRef,
                        RegexQNamePattern.MATCH_ALL, RegexQNamePattern.MATCH_ALL, 1, false);
                countChildAssoc = childAssocs.size();
                if (logger.isDebugEnabled())
                {
                    logger.debug("Time for counting childAssocs for : " + storeRef + " time="
                            + (System.currentTimeMillis() - start));
                }
            }
            else
            {
                long start = 0;
                if (logger.isDebugEnabled())
                {
                    logger.debug("Counting childAssocs for store: " + storeRef);
                    start = System.currentTimeMillis();
                }
                countChildAssoc = nodeService.countChildAssocs(rootNodeRef, true);
                if (logger.isDebugEnabled())
                {
                    logger.debug("Time for counting childAssocs for : " + storeRef + " time="
                            + (System.currentTimeMillis() - start));
                }
            }
            if (logger.isDebugEnabled())
            {
                logger.debug("Counting childAssocs for store: " + storeRef + " countChildAssoc = " + countChildAssoc);
            }
            if (countChildAssoc == 0)
            {
                continue;
            }

            if (indexRecoveryMode != RecoveryMode.FULL)
            {
                if (logger.isDebugEnabled())
                {
                    logger.debug("Checking index for store: " + storeRef);
                }

                // perform a Lucene query for the root node
                SearchParameters sp = new SearchParameters();
                sp.addStore(storeRef);
                sp.setLanguage(SearchService.LANGUAGE_LUCENE);
                sp.setQuery("ISROOT:T");

                ResultSet results = null;
                int size = 0;
                try
                {
                    results = searchService.query(sp);
                    size = results.length();
                }
                finally
                {
                    try { results.close(); } catch (Throwable e) {}
                }

                if (size == 0)
                {
                    // indexes missing for root node
                    missingIndexStoreRefs.add(storeRef);
                    // debug
                    if (logger.isDebugEnabled())
                    {
                        logger.debug("Index missing for store: \n" +
                                "   store: " + storeRef);
                    }
                }
                else if (size > 1)
                {
                    // there are duplicates
                    String msg = I18NUtil.getMessage(ERR_DUPLICATE_ROOT_NODE, storeRef);
                    throw new AlfrescoRuntimeException(msg);
                }
            }
        }
        return missingIndexStoreRefs;
    }

}
@@ -1,36 +1,36 @@
package org.alfresco.repo.admin;

import org.springframework.context.ApplicationEvent;
import org.springframework.extensions.surf.util.AbstractLifecycleBean;

/**
 * Track repo bootstrap so sub systems do not duplicate stuff or do it too early ... e.g. index rebuild/check
 *
 * @author andyh
 */
public class RepositoryEndBootstrapBean extends AbstractLifecycleBean
{
    private RepositoryState repositoryState;

    public RepositoryState getRepositoryState()
    {
        return repositoryState;
    }

    public void setRepositoryState(RepositoryState repositoryState)
    {
        this.repositoryState = repositoryState;
    }

    @Override
    protected void onBootstrap(ApplicationEvent event)
    {
        repositoryState.setBootstrapping(false);
    }

    @Override
    protected void onShutdown(ApplicationEvent event)
    {
        // NOOP
    }
}
@@ -1,36 +1,36 @@
package org.alfresco.repo.admin;

import org.springframework.context.ApplicationEvent;
import org.springframework.extensions.surf.util.AbstractLifecycleBean;

/**
 * Track repo bootstrap so sub systems do not duplicate stuff or do it too early ... e.g. index rebuild/check
 *
 * @author andyh
 */
public class RepositoryStartBootstrapBean extends AbstractLifecycleBean
{
    private RepositoryState repositoryState;

    public RepositoryState getRepositoryState()
    {
        return repositoryState;
    }

    public void setRepositoryState(RepositoryState repositoryState)
    {
        this.repositoryState = repositoryState;
    }

    @Override
    protected void onBootstrap(ApplicationEvent event)
    {
        repositoryState.setBootstrapping(true);
    }

    @Override
    protected void onShutdown(ApplicationEvent event)
    {
        // NOOP
    }
}
@@ -1,48 +1,48 @@
package org.alfresco.repo.admin;

import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * A class that maintains a thread-safe ready indicator on the current bootstrap state of the repository.
 *
 * @author Andy
 *
 */
public class RepositoryState
{
    private boolean bootstrapping;
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    /**
     * Determine if the repository is ready to use.
     *
     * @return <tt>true</tt> if the repository bootstrap process is still going,
     *         or <tt>false</tt> if the repository is ready to use
     */
    public boolean isBootstrapping()
    {
        this.lock.readLock().lock();
        try
        {
            return bootstrapping;
        }
        finally
        {
            this.lock.readLock().unlock();
        }
    }

    public void setBootstrapping(boolean bootstrapping)
    {
        this.lock.writeLock().lock();
        try
        {
            this.bootstrapping = bootstrapping;
        }
        finally
        {
            this.lock.writeLock().unlock();
        }
    }

}
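
Note (editor's illustration, not part of this commit): RepositoryStartBootstrapBean sets the bootstrapping flag at the start of repository startup and RepositoryEndBootstrapBean clears it at the end, so any subsystem holding a RepositoryState reference can defer work such as an index rebuild/check. A minimal hypothetical consumer, wired with a setter like the beans above:

// Illustration only -- not part of this commit; the class name and the deferred task are hypothetical.
public class DeferredIndexWork
{
    private RepositoryState repositoryState;

    public void setRepositoryState(RepositoryState repositoryState)
    {
        this.repositoryState = repositoryState;
    }

    public void runIfReady()
    {
        if (repositoryState.isBootstrapping())
        {
            // bootstrap still in progress -- too early to rebuild/check indexes
            return;
        }
        // ... safe to do the real work here ...
    }
}
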
@@ -1,100 +1,100 @@
package org.alfresco.repo.admin.patch;

import org.alfresco.service.descriptor.Descriptor;
import org.alfresco.service.descriptor.DescriptorService;
import org.springframework.context.ApplicationEvent;
import org.springframework.extensions.surf.util.AbstractLifecycleBean;

/**
 * @author Andy
 */
public class OptionalPatchApplicationCheckBootstrapBean extends AbstractLifecycleBean
{
    PatchService patchService;

    Patch patch;

    DescriptorService descriptorService;

    volatile boolean patchApplied = false;

    /**
     * @param patchService
     *            the patchService to set
     */
    public void setPatchService(PatchService patchService)
    {
        this.patchService = patchService;
    }

    /**
     * @param patch
     *            the patch to set
     */
    public void setPatch(Patch patch)
    {
        this.patch = patch;
    }

    /**
     * @param descriptorService
     *            the descriptorService to set
     */
    public void setDescriptorService(DescriptorService descriptorService)
    {
        this.descriptorService = descriptorService;
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.extensions.surf.util.AbstractLifecycleBean#onBootstrap(org.springframework.context.
     * ApplicationEvent)
     */
    @Override
    protected void onBootstrap(ApplicationEvent event)
    {
        Descriptor descriptor = descriptorService.getInstalledRepositoryDescriptor();
        if (patch == null)
        {
            patchApplied = true;
        }
        else
        {
            AppliedPatch appliedPatch = patchService.getPatch(patch.getId());
            if (appliedPatch == null)
            {
                patchApplied = patch.getFixesToSchema() < descriptor.getSchema();
            }
            else
            {
                patchApplied = appliedPatch.getSucceeded();
            }
        }
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.extensions.surf.util.AbstractLifecycleBean#onShutdown(org.springframework.context.
     * ApplicationEvent)
     */
    @Override
    protected void onShutdown(ApplicationEvent event)
    {
    }

    /**
     * Whether the patch has been applied
     *
     * @return boolean
     */
    public boolean getPatchApplied()
    {
        return patchApplied;
    }

    public String getPatchId()
    {
        return patch.getId();
    }
}
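
Note (editor's illustration, not part of this commit): a component that depends on an optional patch could hold a reference to the bean above and branch on getPatchApplied(); the class below is hypothetical and would be wired through Spring like the other beans in this change.

// Illustration only -- not part of this commit; the class name and methods are hypothetical.
public class PatchAwareComponent
{
    private OptionalPatchApplicationCheckBootstrapBean patchCheck;

    public void setPatchCheck(OptionalPatchApplicationCheckBootstrapBean patchCheck)
    {
        this.patchCheck = patchCheck;
    }

    public void doWork()
    {
        if (patchCheck.getPatchApplied())
        {
            // the optional patch (patchCheck.getPatchId()) is in place -- take the new code path
        }
        else
        {
            // fall back to the legacy behaviour until the patch has been applied
        }
    }
}
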
@@ -1,52 +1,52 @@
package org.alfresco.repo.admin.patch;

import org.alfresco.service.transaction.TransactionService;

public class SimplePatch extends AbstractPatch
{
    public static final String MSG_SUCCESS = "SimplePatch applied successfully";

    /**
     * Default constructor for Spring config
     */
    public SimplePatch()
    {
    }

    /**
     * Overrides the base class version to do nothing, i.e. it does not self-register
     */
    @Override
    public void init()
    {
    }

    /**
     * Helper constructor for some tests.  Default properties are set automatically.
     *
     * @param transactionService TransactionService
     * @param requiresTransaction true if transaction required
     */
    /* protected */ SimplePatch(TransactionService transactionService, boolean requiresTransaction)
    {
        setTransactionService(transactionService);
        setId("SimplePatch");
        setDescription("This is a simple patch");
        setFixesFromSchema(0);
        setFixesToSchema(1000);
        setTargetSchema(1001);
        setRequiresTransaction(requiresTransaction);
    }

    /**
     * Does nothing
     *
     * @return Returns a success or failure message dependent on the constructor used
     */
    @Override
    protected String applyInternal() throws Exception
    {
        return MSG_SUCCESS;
    }

}
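
Note (editor's illustration, not part of this commit): SimplePatch above shows the pattern every patch follows -- extend AbstractPatch, implement applyInternal(), and supply the id, description and schema bounds either through Spring configuration or, as in the test constructor, programmatically. A minimal hypothetical subclass:

// Illustration only -- not part of this commit; the class name and message are hypothetical.
package org.alfresco.repo.admin.patch;

public class ExampleNoOpPatch extends AbstractPatch
{
    @Override
    protected String applyInternal() throws Exception
    {
        // id, description and fixes/target schema numbers would normally be set as Spring
        // properties, exactly as the SimplePatch test constructor sets them in code
        return "ExampleNoOpPatch applied successfully";
    }
}
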
@@ -1,245 +1,245 @@
package org.alfresco.repo.admin.patch.impl;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.alfresco.email.server.AliasableAspect;
import org.alfresco.email.server.EmailServerModel;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.batch.BatchProcessWorkProvider;
import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker;
import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.cmr.attributes.AttributeService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;

/**
 * Patch to duplicate the AliasableAspect into the attributes service.
 *
 * Inbound email.
 *
 * @author mrogers
 *
 */
public class AliasableAspectPatch extends AbstractPatch
{
    private static final String MSG_SUCCESS = "patch.emailAliasableAspect.result";

    private AttributeService attributeService;
    private NodeDAO nodeDAO;
    private PatchDAO patchDAO;
    private QNameDAO qnameDAO;
    private BehaviourFilter behaviourFilter;

    private final int batchThreads = 3;
    private final int batchSize = 40;
    private final long count = batchThreads * batchSize;

    private static Log logger = LogFactory.getLog(AliasableAspectPatch.class);

    @Override
    protected String applyInternal() throws Exception
    {
        BatchProcessWorkProvider<NodeRef> workProvider = new BatchProcessWorkProvider<NodeRef>()
        {
            final List<NodeRef> result = new ArrayList<NodeRef>();

            Long aspectQNameId = 0L;
            long maxNodeId = getPatchDAO().getMaxAdmNodeID();

            long minSearchNodeId = 1;
            long maxSearchNodeId = count;

            Pair<Long, QName> val = getQnameDAO().getQName(EmailServerModel.ASPECT_ALIASABLE);

            public int getTotalEstimatedWorkSize()
            {
                return result.size();
            }

            public Collection<NodeRef> getNextWork()
            {
                if (val != null)
                {
                    Long aspectQNameId = val.getFirst();

                    result.clear();

                    while (result.isEmpty() && minSearchNodeId < maxNodeId)
                    {
                        List<Long> nodeids = getPatchDAO().getNodesByAspectQNameId(aspectQNameId, minSearchNodeId, maxSearchNodeId);

                        for (Long nodeid : nodeids)
                        {
                            NodeRef.Status status = getNodeDAO().getNodeIdStatus(nodeid);
                            if (!status.isDeleted())
                            {
                                result.add(status.getNodeRef());
                            }
                        }
                        minSearchNodeId = minSearchNodeId + count;
                        maxSearchNodeId = maxSearchNodeId + count;
                    }
                }

                return result;
            }
        };

        RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
        // Force the transaction to be writable even if the repository is read-only
        // MNT-10764
        txnHelper.setForceWritable(true);

        BatchProcessor<NodeRef> batchProcessor = new BatchProcessor<NodeRef>(
                "AliasableAspectPatch",
                txnHelper,
                workProvider,
                batchThreads,
                batchSize,
                applicationEventPublisher,
                logger,
                1000);

        BatchProcessWorker<NodeRef> worker = new BatchProcessWorker<NodeRef>()
        {
            public void afterProcess() throws Throwable
            {
            }

            public void beforeProcess() throws Throwable
            {
            }

            public String getIdentifier(NodeRef entry)
            {
                return entry.toString();
            }

            public void process(NodeRef entry) throws Throwable
            {
                String alias = (String) nodeService.getProperty(entry, EmailServerModel.PROP_ALIAS);
                if (alias != null)
                {
                    NodeRef existing = (NodeRef) getAttributeService().getAttribute(AliasableAspect.ALIASABLE_ATTRIBUTE_KEY_1,
                            AliasableAspect.ALIASABLE_ATTRIBUTE_KEY_2,
                            AliasableAspect.normaliseAlias(alias));

                    if (existing != null)
                    {
                        if (!existing.equals(entry))
                        {
                            // alias is used by more than one node - warning of some sort?
                            if (logger.isWarnEnabled())
                            {
                                logger.warn("Email alias is not unique, alias:" + alias + " nodeRef:" + entry);
                            }

                            try
                            {
                                behaviourFilter.disableBehaviour(EmailServerModel.ASPECT_ALIASABLE);
                                nodeService.removeAspect(entry, EmailServerModel.ASPECT_ALIASABLE);
                            }
                            finally
                            {
                                behaviourFilter.enableBehaviour(EmailServerModel.ASPECT_ALIASABLE);
                            }
                        }

                        // else do nothing - attribute already exists.
                    }
                    else
                    {
                        if (logger.isDebugEnabled())
                        {
                            logger.debug("creating email alias attribute for " + alias);
                        }
                        getAttributeService().createAttribute(entry, AliasableAspect.ALIASABLE_ATTRIBUTE_KEY_1, AliasableAspect.ALIASABLE_ATTRIBUTE_KEY_2, AliasableAspect.normaliseAlias(alias));
                    }
                }
            }
        };

        // Now set the batch processor to work
        batchProcessor.process(worker, true);

        return I18NUtil.getMessage(MSG_SUCCESS);
    }

    public void setAttributeService(AttributeService attributeService)
    {
        this.attributeService = attributeService;
    }

    public AttributeService getAttributeService()
    {
        return attributeService;
    }

    public void setNodeDAO(NodeDAO nodeDAO)
    {
        this.nodeDAO = nodeDAO;
    }

    public NodeDAO getNodeDAO()
    {
        return nodeDAO;
    }

    public void setPatchDAO(PatchDAO patchDAO)
    {
        this.patchDAO = patchDAO;
    }

    public PatchDAO getPatchDAO()
    {
        return patchDAO;
    }

    public void setQnameDAO(QNameDAO qnameDAO)
    {
        this.qnameDAO = qnameDAO;
    }

    public QNameDAO getQnameDAO()
    {
        return qnameDAO;
    }

    public void setBehaviourFilter(BehaviourFilter behaviourFilter)
    {
        this.behaviourFilter = behaviourFilter;
    }

    public BehaviourFilter getBehaviourFilter()
    {
        return behaviourFilter;
    }

}
@@ -1,130 +1,130 @@
package org.alfresco.repo.admin.patch.impl;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.workflow.BPMEngineRegistry;
import org.alfresco.repo.workflow.WorkflowDeployer;
import org.alfresco.service.cmr.admin.PatchException;
import org.alfresco.service.cmr.workflow.WorkflowAdminService;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.extensions.surf.util.I18NUtil;

/**
 * Generic patch that re-deploys a workflow definition
 *
 * @author David Caruana
 */
public class GenericWorkflowPatch extends AbstractPatch implements ApplicationContextAware
{
    private static final String MSG_DEPLOYED = "patch.genericWorkflow.result.deployed";
    private static final String MSG_UNDEPLOYED = "patch.genericWorkflow.result.undeployed";
    private static final String ERR_PROPERTY_REQUIRED = "patch.genericWorkflow.property_required";
    private static final String MSG_ERROR_ENGINE_DEACTIVATED = "patch.genericWorkflow.error_engine_deactivated";

    private ApplicationContext applicationContext;
    private List<Properties> workflowDefinitions;
    private List<String> undeployWorkflowNames;

    /* (non-Javadoc)
     * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
     */
    public void setApplicationContext(ApplicationContext applicationContext)
        throws BeansException
    {
        this.applicationContext = applicationContext;
    }

    /**
     * Sets the Workflow Definitions
     *
     * @param workflowDefinitions List<Properties>
     */
    public void setWorkflowDefinitions(List<Properties> workflowDefinitions)
    {
        this.workflowDefinitions = workflowDefinitions;
    }

    /**
     * Sets the Workflow Names to be undeployed
     *
     * @param undeployWorkflowNames list with names
     */
    public void setUndeployWorkflowNames(List<String> undeployWorkflowNames)
    {
        this.undeployWorkflowNames = undeployWorkflowNames;
    }

    @Override
    protected void checkProperties()
    {
        if ((workflowDefinitions == null) && (undeployWorkflowNames == null))
        {
            throw new PatchException(ERR_PROPERTY_REQUIRED, "workflowDefinitions", "undeployWorkflowNames", this);
        }
        super.checkProperties();
    }

    @Override
    protected String applyInternal() throws Exception
    {
        WorkflowDeployer deployer = (WorkflowDeployer) applicationContext.getBean("workflowPatchDeployer");
        WorkflowAdminService workflowAdminService = (WorkflowAdminService) applicationContext.getBean("workflowAdminService");

        if (workflowDefinitions != null)
        {
            for (Properties props : workflowDefinitions)
            {
                props.put(WorkflowDeployer.REDEPLOY, "true");
            }
            deployer.setWorkflowDefinitions(workflowDefinitions);
            deployer.init();
        }

        int undeployed = 0;
        StringBuilder errorMessages = new StringBuilder();
        if (undeployWorkflowNames != null)
        {
            List<String> undeployableWorkflows = new ArrayList<String>(undeployWorkflowNames.size());
            for (String workflowName : undeployWorkflowNames)
            {
                String engineId = BPMEngineRegistry.getEngineId(workflowName);
                if (workflowAdminService.isEngineEnabled(engineId))
                {
                    undeployableWorkflows.add(workflowName);
                }
                else
                {
                    errorMessages.append(I18NUtil.getMessage(MSG_ERROR_ENGINE_DEACTIVATED, workflowName, engineId));
                }
            }
            undeployed = deployer.undeploy(undeployableWorkflows);
        }

        // done
        StringBuilder msg = new StringBuilder();
        if (workflowDefinitions != null)
        {
            msg.append(I18NUtil.getMessage(MSG_DEPLOYED, workflowDefinitions.size()));
        }
        if (undeployWorkflowNames != null)
        {
            if (msg.length() > 0)
            {
                msg.append(' ');
            }
            msg.append(I18NUtil.getMessage(MSG_UNDEPLOYED, undeployed));
        }
        if (errorMessages.length() > 0)
        {
            msg.append(errorMessages);
        }
        return msg.toString();
    }

}
|
||||
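For orientation, here is a minimal usage sketch of the two optional properties handled above. GenericWorkflowPatch itself is real, but the property keys ("engineId", "location"), the file path and the workflow names below are illustrative assumptions rather than values taken from this commit; in practice these properties are injected through Spring bean configuration.

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.alfresco.repo.admin.patch.impl.GenericWorkflowPatch;

// Configuration sketch only, under the assumptions stated above.
public class GenericWorkflowPatchConfigSketch
{
    public static GenericWorkflowPatch configure()
    {
        GenericWorkflowPatch patch = new GenericWorkflowPatch();

        // One definition to (re-)deploy; applyInternal() later forces REDEPLOY=true on every entry.
        Properties definition = new Properties();
        definition.put("engineId", "activiti");                             // assumed key/value
        definition.put("location", "alfresco/workflow/example.bpmn20.xml"); // assumed key/value
        List<Properties> definitions = new ArrayList<Properties>();
        definitions.add(definition);
        patch.setWorkflowDefinitions(definitions);

        // Names to undeploy; the "engine$name" form is what BPMEngineRegistry.getEngineId() resolves against.
        List<String> undeploy = new ArrayList<String>();
        undeploy.add("activiti$exampleWorkflow");                           // assumed name
        patch.setUndeployWorkflowNames(undeploy);

        // If neither property were set, checkProperties() would throw a PatchException.
        return patch;
    }
}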
@@ -1,65 +1,65 @@
package org.alfresco.repo.admin.patch.impl;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.model.filefolder.HiddenAspect;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteService;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.springframework.extensions.surf.util.I18NUtil;

/**
 * Patch that un-hides the contents of the surf-config folder of the
 * pre-installed "swsdp" sample site.
 */
public class SWSDPPatch extends AbstractPatch
{
    private static final String MSG_SITE_PATCHED = "patch.swsdpPatch.success";
    private static final String MSG_SKIPPED = "patch.swsdpPatch.skipped";
    private static final String MSG_MISSING_SURFCONFIG = "patch.swsdpPatch.missingSurfConfig";

    private SiteService siteService;
    private HiddenAspect hiddenAspect;

    public void setSiteService(SiteService siteService)
    {
        this.siteService = siteService;
    }

    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    public void setHiddenAspect(HiddenAspect hiddenAspect)
    {
        this.hiddenAspect = hiddenAspect;
    }

    @Override
    protected String applyInternal() throws Exception
    {
        SiteInfo siteInfo = siteService.getSite("swsdp");
        if (siteInfo != null)
        {
            NodeRef nodeRef = siteInfo.getNodeRef();
            NodeRef surfConfigNodeRef = nodeService.getChildByName(nodeRef, ContentModel.ASSOC_CONTAINS, "surf-config");
            if (surfConfigNodeRef == null)
            {
                return I18NUtil.getMessage(MSG_MISSING_SURFCONFIG);
            }
            else
            {
                for (ChildAssociationRef childRef : nodeService.getChildAssocs(surfConfigNodeRef, ContentModel.ASSOC_CONTAINS, RegexQNamePattern.MATCH_ALL))
                {
                    hiddenAspect.showNode(childRef.getChildRef(), true);
                }
            }

            return I18NUtil.getMessage(MSG_SITE_PATCHED);
        }
        else
        {
            return I18NUtil.getMessage(MSG_SKIPPED);
        }
    }
}
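As a sanity-check companion to SWSDPPatch, the sketch below walks the same surf-config children and reports whether any of them still look hidden. It is illustrative only: the services are assumed to be injected elsewhere, and the assumption that HiddenAspect.showNode(..., true) clears cm:hidden (ContentModel.ASPECT_HIDDEN) is ours, not something this commit states.

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteService;
import org.alfresco.service.namespace.RegexQNamePattern;

// Verification sketch only, under the assumptions stated above.
public class SwsdpSurfConfigCheckSketch
{
    public static boolean surfConfigVisible(SiteService siteService, NodeService nodeService)
    {
        SiteInfo siteInfo = siteService.getSite("swsdp");
        if (siteInfo == null)
        {
            return true; // sample site not installed, nothing to check
        }
        NodeRef surfConfig = nodeService.getChildByName(siteInfo.getNodeRef(), ContentModel.ASSOC_CONTAINS, "surf-config");
        if (surfConfig == null)
        {
            return false; // the patch reports this case as missing surf-config
        }
        for (ChildAssociationRef child : nodeService.getChildAssocs(surfConfig, ContentModel.ASSOC_CONTAINS, RegexQNamePattern.MATCH_ALL))
        {
            if (nodeService.hasAspect(child.getChildRef(), ContentModel.ASPECT_HIDDEN))
            {
                return false; // still hidden, so the patch has not (fully) run
            }
        }
        return true;
    }
}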
@@ -1,235 +1,235 @@
package org.alfresco.repo.admin.patch.impl;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.lock.JobLockService.JobLockRefreshCallback;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.cmr.admin.PatchException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.extensions.surf.util.I18NUtil;

/**
 * The SharedFolderPatch is a Generic Bootstrap Patch with the extra ability to
 * rename an existing folder that is in the way (in a different namespace).
 * <p>
 * The first use-case is when there is a child called cm:shared and we want to patch a folder with app:shared.
 *
 * @author mrogers
 */
public class SharedFolderPatch extends GenericBootstrapPatch
{
    private JobLockService jobLockService;

    private long LOCK_TIME_TO_LIVE = 10000;
    private long LOCK_REFRESH_TIME = 5000;

    private String renamePath;

    private Log logger = LogFactory.getLog(SharedFolderPatch.class);

    private static final String MSG_RENAMED = "patch.sharedFolder.result.renamed";

    /**
     * Run the Shared Folder Patch asynchronously after bootstrap.
     */
    public void executeAsync()
    {
        // Take the patch job lock
        QName lockQName = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "patch.sharedFolder");
        String lockToken = jobLockService.getLock(lockQName, LOCK_TIME_TO_LIVE, 0, 1);
        SharedFolderPatchCallback callback = new SharedFolderPatchCallback();
        jobLockService.refreshLock(lockToken, lockQName, LOCK_REFRESH_TIME, callback);

        try
        {
            if (logger.isDebugEnabled())
            {
                logger.debug("SharedFolderPatch: job lock held");
            }

            AuthenticationUtil.runAsSystem(new RunAsWork<Void>()
            {
                public Void doWork() throws Exception
                {
                    applyAsync();
                    return null;
                }
            });
        }
        finally
        {
            if (logger.isTraceEnabled())
            {
                logger.trace("PUSH: job finished");
            }

            // Release the locks on the job and stop refreshing
            callback.isActive = false;
            jobLockService.releaseLock(lockToken, lockQName);
        }
    }

    @Override
    protected String applyInternal() throws Exception
    {
        StoreRef storeRef = importerBootstrap.getStoreRef();
        NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
        if (getRenamePath() != null)
        {
            List<NodeRef> results = searchService.selectNodes(
                    rootNodeRef,
                    getRenamePath(),
                    null,
                    namespaceService,
                    false);

            if (results.size() > 1)
            {
                throw new PatchException(ERR_MULTIPLE_FOUND, renamePath);
            }
            else if (results.size() == 1)
            {
                if (logger.isDebugEnabled())
                {
                    logger.debug("There is an existing node in the way path:" + getRenamePath());
                }
                // A node already exists that we must rename.
                NodeRef existingNodeRef = results.get(0);

                // get the path of the parent node e.g. company_home
                LinkedList<String> folderElements = new LinkedList<String>(Arrays.asList(getRenamePath().split("/")));
                folderElements.removeLast();

                StringBuffer parentPath = new StringBuffer();

                for (String folder : folderElements)
                {
                    parentPath.append("/");
                    parentPath.append(folder);
                }

                List<NodeRef> parentResults = searchService.selectNodes(
                        rootNodeRef,
                        parentPath.toString(),
                        null,
                        namespaceService,
                        false);

                if (parentResults.size() == 1)
                {
                    NodeRef parentNodeRef = parentResults.get(0);

                    if (logger.isDebugEnabled())
                    {
                        logger.debug("Found the parent node - doing a move parentNodeRef:" + parentNodeRef);
                    }

                    // rename the existing node
                    nodeService.moveNode(existingNodeRef, parentNodeRef, ContentModel.ASSOC_CONTAINS, QName.createQName(NamespaceService.APP_MODEL_1_0_URI, "shared"));
                    return I18NUtil.getMessage(MSG_RENAMED, renamePath);
                }
                else
                {
                    // Something has gone horribly wrong if we get here - we have multiple parents, or none despite finding the node earlier
                    throw new PatchException(ERR_MULTIPLE_FOUND, parentPath.toString());
                }
            }
        }

        // Else run the normal GenericBootstrapPatch implementation
        if (logger.isDebugEnabled())
        {
            logger.debug("Node does not already exist, Running the Generic Bootstrap Patch");
        }
        return super.applyInternal();
    }

    public void setRenamePath(String renamePath)
    {
        this.renamePath = renamePath;
    }

    public String getRenamePath()
    {
        return renamePath;
    }

    public void setJobLockService(JobLockService jobLockService)
    {
        this.jobLockService = jobLockService;
    }

    public JobLockService getJobLockService()
    {
        return jobLockService;
    }

    /**
     * Job to initiate the {@link SharedFolderPatch} if it has been deferred
     *
     * @author Mark Rogers
     * @since 4.2
     */
    public static class SharedFolderPatchJob implements Job
    {
        public SharedFolderPatchJob()
        {
        }

        /**
         * Calls the shared folder patch to do its work
         */
        public void execute(JobExecutionContext context) throws JobExecutionException
        {
            JobDataMap jobData = context.getJobDetail().getJobDataMap();
            // extract the shared folder patch to use
            Object sharedFolderPatchObj = jobData.get("sharedFolderPatch");
            if (sharedFolderPatchObj == null || !(sharedFolderPatchObj instanceof SharedFolderPatch))
            {
                throw new AlfrescoRuntimeException(
                        "'sharedFolderPatch' data must contain valid 'SharedFolderPatch' reference");
            }

            // Job Lock Here - should probably move into the patch service at some time.
            SharedFolderPatch sharedFolderPatch = (SharedFolderPatch) sharedFolderPatchObj;
            sharedFolderPatch.executeAsync();
        }
    }

    private class SharedFolderPatchCallback implements JobLockRefreshCallback
    {
        public boolean isActive = true;

        @Override
        public boolean isActive()
        {
            return isActive;
        }

        @Override
        public void lockReleased()
        {
            if (logger.isTraceEnabled())
            {
                logger.trace("lock released");
            }
        }
    };
}
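SharedFolderPatch.executeAsync() above follows the usual JobLockService pattern: acquire a named lock, keep it alive through a refresh callback while the work runs, then stop the callback and release the lock in a finally block. The sketch below isolates that pattern using the same JobLockService calls as the patch; the lock name, the timings and the Runnable wrapper are illustrative assumptions.

import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.lock.JobLockService.JobLockRefreshCallback;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;

// Pattern sketch only, under the assumptions stated above.
public class JobLockPatternSketch
{
    private static final long TIME_TO_LIVE = 10000L;
    private static final long REFRESH_TIME = 5000L;

    public static void runExclusively(JobLockService jobLockService, Runnable work)
    {
        QName lockQName = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "example.patchLock"); // assumed name
        String lockToken = jobLockService.getLock(lockQName, TIME_TO_LIVE, 0, 1);

        // The callback keeps answering "still active" so the service keeps refreshing the lock.
        final boolean[] active = { true };
        JobLockRefreshCallback callback = new JobLockRefreshCallback()
        {
            @Override
            public boolean isActive()
            {
                return active[0];
            }

            @Override
            public void lockReleased()
            {
                // nothing to clean up in this sketch
            }
        };
        jobLockService.refreshLock(lockToken, lockQName, REFRESH_TIME, callback);

        try
        {
            work.run();
        }
        finally
        {
            active[0] = false; // stop the refresh callback
            jobLockService.releaseLock(lockToken, lockQName);
        }
    }
}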
File diff suppressed because it is too large