Merged 1.4 to HEAD

svn merge svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4421 svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4462 .
   svn resolved root\projects\repository\source\java\org\alfresco\repo\model\filefolder\FileFolderPerformanceTester.java
   svn revert root\projects\repository\config\alfresco\version.properties
   svn resolved root\projects\3rd-party\.classpath


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@4661 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley
2006-12-19 16:15:44 +00:00
parent ced83b971b
commit 58ee1796a2
9 changed files with 139 additions and 80 deletions

View File

@@ -52,6 +52,22 @@ public class InvalidNameEndingPatch extends AbstractPatch
private SessionFactory sessionFactory;
private NodeDaoService nodeDaoService;
public static void main(String[] args)
{
String name = "fred. ... ";
int i = (name.length() == 0) ? 0 : name.length() - 1;
while (i >= 0 && (name.charAt(i) == '.' || name.charAt(i) == ' '))
{
i--;
}
String updatedName = (i == 0) ? "unnamed" : name.substring(0, i + 1);
System.out.println(updatedName);
}
public InvalidNameEndingPatch()
{
}
@@ -149,7 +165,7 @@ public class InvalidNameEndingPatch extends AbstractPatch
i--;
}
String updatedName = name.substring(0, i);
String updatedName = (i == 0) ? "unnamed" : name.substring(0, i + 1);
int idx = 0;
boolean applied = false;
while (!applied)
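
Note on the InvalidNameEndingPatch hunks above: the first hunk adds a throwaway main() used to exercise the trimming logic by hand, and the second fixes an off-by-one in the patch itself. The old line name.substring(0, i) dropped the last character that survived the trim; the replacement keeps it with i + 1 and substitutes "unnamed" when the trim reaches the first character. A minimal, self-contained sketch of the corrected behaviour (the class and method names here are illustrative, not part of the patch):

public class NameTrimExample
{
    // Mirrors the corrected logic from InvalidNameEndingPatch: strip trailing
    // '.' and ' ' characters while keeping the last valid character.
    static String trimInvalidEnding(String name)
    {
        int i = (name.length() == 0) ? 0 : name.length() - 1;
        while (i >= 0 && (name.charAt(i) == '.' || name.charAt(i) == ' '))
        {
            i--;
        }
        // the old code returned name.substring(0, i), losing the character at index i;
        // the fix keeps it and falls back to "unnamed" when the index has reached 0
        return (i == 0) ? "unnamed" : name.substring(0, i + 1);
    }

    public static void main(String[] args)
    {
        System.out.println(trimInvalidEnding("fred. ... "));   // prints "fred"
        System.out.println(trimInvalidEnding("report.doc"));   // prints "report.doc"
    }
}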

View File

@@ -62,10 +62,7 @@ public class EhCacheTracerJob implements Job
{
try
{
if (logger.isDebugEnabled())
{
execute();
}
execute();
}
catch (Throwable e)
{
@@ -86,6 +83,7 @@ public class EhCacheTracerJob implements Job
// get all the caches
String[] cacheNames = cacheManager.getCacheNames();
logger.debug("Dumping EHCache info:");
boolean analyzeAll = true;
for (String cacheName : cacheNames)
{
Cache cache = cacheManager.getCache(cacheName);
@@ -93,27 +91,39 @@ public class EhCacheTracerJob implements Job
{
continue;
}
Log cacheLogger = LogFactory.getLog(this.getClass().getName() + "." + cacheName);
// log each cache to its own logger
// dump
CacheAnalysis analysis = new CacheAnalysis(cache);
logger.debug(analysis);
// get the size
allCachesTotalSize += analysis.getSize();
double cacheEstimatedMaxSize = analysis.getEstimatedMaxSize();
estimatedMaxSize += (Double.isNaN(cacheEstimatedMaxSize) || Double.isInfinite(cacheEstimatedMaxSize))
? 0.0
: cacheEstimatedMaxSize;
if (cacheLogger.isDebugEnabled())
{
CacheAnalysis analysis = new CacheAnalysis(cache);
cacheLogger.debug(analysis);
// get the size
allCachesTotalSize += analysis.getSize();
double cacheEstimatedMaxSize = analysis.getEstimatedMaxSize();
estimatedMaxSize += (Double.isNaN(cacheEstimatedMaxSize) || Double.isInfinite(cacheEstimatedMaxSize))
? 0.0
: cacheEstimatedMaxSize;
}
else
{
analyzeAll = false;
}
}
if (analyzeAll)
{
// check the size
double sizePercentage = (double)allCachesTotalSize / (double)maxHeapSize * 100.0;
double maxSizePercentage = estimatedMaxSize / (double)maxHeapSize * 100.0;
String msg = String.format(
"EHCaches currently consume %5.2f MB or %3.2f percent of system VM size. \n" +
"The estimated maximum size is %5.2f MB or %3.2f percent of system VM size.",
(double)allCachesTotalSize / 1024.0 / 1024.0,
sizePercentage,
estimatedMaxSize / 1024.0 / 1024.0,
maxSizePercentage);
logger.debug(msg);
}
// check the size
double sizePercentage = (double)allCachesTotalSize / (double)maxHeapSize * 100.0;
double maxSizePercentage = estimatedMaxSize / (double)maxHeapSize * 100.0;
String msg = String.format(
"EHCaches currently consume %5.2f MB or %3.2f percent of system VM size. \n" +
"The estimated maximum size is %5.2f MB or %3.2f percent of system VM size.",
(double)allCachesTotalSize / 1024.0 / 1024.0,
sizePercentage,
estimatedMaxSize / 1024.0 / 1024.0,
maxSizePercentage);
logger.debug(msg);
}
private static class CacheAnalysis
@@ -211,15 +221,17 @@ public class EhCacheTracerJob implements Job
long maxSize = cache.getMaxElementsInMemory();
long currentSize = cache.getMemoryStoreSize();
long hitCount = cache.getHitCount();
long missCount = cache.getMissCountNotFound();
long totalMissCount = cache.getMissCountNotFound() + cache.getMissCountExpired();
double hitRatio = (double)hitCount / (double)(totalMissCount + hitCount) * 100.0;
double percentageFull = (double)currentSize / (double)maxSize * 100.0;
double estMaxSize = sizeMB / (double) currentSize * (double) maxSize;
StringBuilder sb = new StringBuilder(512);
sb.append(" Analyzing EHCache: \n")
.append("===> ").append(cache.getName()).append("\n")
.append(" Hit Count: ").append(String.format("%10d hits ", hitCount ))
.append(" | Miss Count: ").append(String.format("%10d misses ", missCount )).append("\n")
sb.append("\n")
.append("===> EHCache: ").append(cache).append("\n")
.append(" Hit Ratio: ").append(String.format("%10.2f percent ", hitRatio ))
.append(" | Hit Count: ").append(String.format("%10d hits ", hitCount ))
.append(" | Miss Count: ").append(String.format("%10d misses ", totalMissCount )).append("\n")
.append(" Deep Size: ").append(String.format("%10.2f MB ", sizeMB ))
.append(" | Current Count: ").append(String.format("%10d entries ", currentSize )).append("\n")
.append(" Percentage used: ").append(String.format("%10.2f percent", percentageFull))
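
Note on the EhCacheTracerJob hunks above: the debug guard moves from the job as a whole to a per-cache logger named after the job class plus the cache name, so individual caches can be traced through ordinary log configuration; the summary totals are only printed when every cache was analysed (the analyzeAll flag); and CacheAnalysis now reports a hit ratio that counts expired entries as misses alongside not-found lookups. A small sketch of both ideas follows; the class name, cache name, and counter values are assumed for illustration, not taken from the commit:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class PerCacheLoggerExample
{
    public static void main(String[] args)
    {
        // one logger per cache, named jobClassName + "." + cacheName (both assumed here)
        String jobClassName = "org.alfresco.repo.cache.EhCacheTracerJob";
        String cacheName = "org.alfresco.cache.example";
        Log cacheLogger = LogFactory.getLog(jobClassName + "." + cacheName);
        if (cacheLogger.isDebugEnabled())
        {
            cacheLogger.debug("per-cache analysis would be logged here");
        }

        // hit ratio as computed in the updated CacheAnalysis: expired entries count as misses
        long hitCount = 900L;
        long totalMissCount = 80L + 20L;   // missCountNotFound + missCountExpired
        double hitRatio = (double) hitCount / (double) (totalMissCount + hitCount) * 100.0;
        System.out.println(String.format("Hit Ratio: %10.2f percent", hitRatio));
    }
}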

View File

@@ -23,6 +23,7 @@ import java.util.List;
import junit.framework.TestCase;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
@@ -36,8 +37,8 @@ import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
@@ -66,7 +67,6 @@ public class FileFolderPerformanceTester extends TestCase
private AuthenticationComponent authenticationComponent;
private NodeService nodeService;
private FileFolderService fileFolderService;
private StoreRef storeRef;
private NodeRef rootFolderRef;
private File dataFile;
@@ -78,18 +78,30 @@ public class FileFolderPerformanceTester extends TestCase
authenticationComponent = (AuthenticationComponent) ctx.getBean("authenticationComponent");
nodeService = serviceRegistry.getNodeService();
fileFolderService = serviceRegistry.getFileFolderService();
SearchService searchService = serviceRegistry.getSearchService();
// authenticate
authenticationComponent.setSystemUserAsCurrentUser();
// create a folder root to work in
storeRef = nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, getName() + "_" + System.currentTimeMillis());
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
rootFolderRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(NamespaceService.ALFRESCO_URI, getName()),
ContentModel.TYPE_FOLDER).getChildRef();
// find the guest folder
StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
ResultSet rs = searchService.query(storeRef, SearchService.LANGUAGE_XPATH, "/app:company_home");
try
{
if (rs.length() == 0)
{
throw new AlfrescoRuntimeException("Didn't find Company Home");
}
NodeRef companyHomeNodeRef = rs.getNodeRef(0);
rootFolderRef = fileFolderService.create(
companyHomeNodeRef,
getName() + "_" + System.currentTimeMillis(),
ContentModel.TYPE_FOLDER).getNodeRef();
}
finally
{
rs.close();
}
dataFile = AbstractContentTransformerTest.loadQuickTestFile("txt");
}
@@ -302,16 +314,12 @@ public class FileFolderPerformanceTester extends TestCase
}
}
public void test_1_ordered_1_10() throws Exception
{
buildStructure(rootFolderRef, 1, false, 1, 10, null);
}
// public void test_1_ordered_1_10_read() throws Exception
// /** Load 5000 files into a single folder using 2 threads */
// public void test_2_ordered_1_2500() throws Exception
// {
// buildStructure(rootFolderRef, 1, false, 50, 1, null);
// readStructure(rootFolderRef, 50, 1000, null);
// buildStructure(rootFolderRef, 2, false, 1, 2500, new double[] {0.25, 0.50, 0.75});
// }
//
// public void test_4_ordered_10_100() throws Exception
// {
// buildStructure(rootFolderRef, 4, false, 10, 100, new double[] {0.25, 0.50, 0.75});
@@ -341,16 +349,16 @@ public class FileFolderPerformanceTester extends TestCase
// 400,
// new double[] {0.05, 0.10, 0.20, 0.30, 0.40, 0.50, 0.60, 0.70, 0.80, 0.90});
// }
public void test_4_shuffled_10_100() throws Exception
{
buildStructure(
rootFolderRef,
4,
true,
10,
100,
new double[] {0.05, 0.10, 0.20, 0.30, 0.40, 0.50, 0.60, 0.70, 0.80, 0.90});
}
// public void test_4_shuffled_10_100() throws Exception
// {
// buildStructure(
// rootFolderRef,
// 4,
// true,
// 10,
// 100,
// new double[] {0.05, 0.10, 0.20, 0.30, 0.40, 0.50, 0.60, 0.70, 0.80, 0.90});
// }
// public void test_1_ordered_1_50000() throws Exception
// {
// buildStructure(
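
Note on the FileFolderPerformanceTester hunks above: setUp() no longer creates a throwaway workspace store; it resolves Company Home in the SpacesStore with an XPath query and creates a uniquely named working folder under it via the FileFolderService, closing the ResultSet in a finally block. A compact helper showing the same lookup pattern (the class and method names are illustrative; the service calls mirror the diff):

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchService;

public final class CompanyHomeLookup
{
    // Resolve /app:company_home in the SpacesStore, as the updated setUp() does.
    public static NodeRef findCompanyHome(SearchService searchService)
    {
        StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
        ResultSet rs = searchService.query(storeRef, SearchService.LANGUAGE_XPATH, "/app:company_home");
        try
        {
            if (rs.length() == 0)
            {
                throw new AlfrescoRuntimeException("Didn't find Company Home");
            }
            return rs.getNodeRef(0);
        }
        finally
        {
            // release the result set whether or not the lookup succeeded
            rs.close();
        }
    }
}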

View File

@@ -487,6 +487,8 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
}
// set the index
assoc.setIndex(index);
// flush
nodeDaoService.flush();
}
public QName getType(NodeRef nodeRef) throws InvalidNodeRefException
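
Note on the DbNodeServiceImpl hunk above: after the child-association index is changed, the node DAO is flushed explicitly. This pairs with the HibernateNodeDaoServiceImpl changes in the next file, where the child-association queries stop auto-flushing; once reads no longer trigger a flush, the write path has to push pending changes to the database itself. A rough sketch of that ordering using plain Hibernate calls (the entity is invented for illustration and stands in for Alfresco's DAO layer):

import org.hibernate.Session;

public class FlushBeforeReadExample
{
    // Illustrative stand-in for a mapped child-association entity.
    public static class ExampleAssoc
    {
        private int index;
        public void setIndex(int index) { this.index = index; }
        public int getIndex() { return index; }
    }

    // Mutate a managed object, then flush so a later non-flushing query sees the change.
    public static void setIndexAndFlush(Session session, ExampleAssoc assoc, int index)
    {
        assoc.setIndex(index);   // in-memory change only
        session.flush();         // write it out before any FlushMode.NEVER query runs
    }
}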

View File

@@ -676,6 +676,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Query query = session
.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_CHILD_ASSOCS)
.setFlushMode(FlushMode.NEVER)
.setLong("parentId", parentNode.getId());
return query.list();
}
@@ -693,6 +694,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Query query = session
.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_CHILD_ASSOC_REFS)
.setFlushMode(FlushMode.NEVER)
.setLong("parentId", parentNode.getId());
return query.list();
}
@@ -712,6 +714,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Query query = session
.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_CHILD_ASSOC_REFS_BY_QNAME)
.setFlushMode(FlushMode.NEVER)
.setLong("parentId", parentNode.getId())
.setParameter("childAssocQName", assocQName);
return query.list();
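
Note on the HibernateNodeDaoServiceImpl hunks above: the three child-association named queries are now issued with FlushMode.NEVER, so running them no longer forces Hibernate to flush pending session changes first; that flush happens once, explicitly, via the DbNodeServiceImpl change in the previous file. A minimal sketch of a named query configured this way (the query name, parameter, and class are illustrative):

import java.util.List;

import org.hibernate.FlushMode;
import org.hibernate.Query;
import org.hibernate.Session;

public class NoAutoFlushQueryExample
{
    // Run a named query without triggering an automatic session flush.
    @SuppressWarnings("unchecked")
    public static List<Object> childAssocs(Session session, long parentId)
    {
        return session
                .getNamedQuery("example.GetChildAssocs")   // hypothetical named query
                .setFlushMode(FlushMode.NEVER)             // do not auto-flush for this query
                .setLong("parentId", parentId)
                .list();
    }
}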

View File

@@ -484,7 +484,6 @@ public class NTLMAuthenticationComponentImpl extends AbstractAuthenticationCompo
// Authenticate using the token
authenticate( authToken);
setCurrentUser( userName.toLowerCase());
}
/**