Merged V2.9 to HEAD

9241: Merged V2.2 to V2.9
      9119: Merged V2.1 to V2.2
         8671: Fix for AR-2221 - JavaScript scriptable Map objects recursively converted to Freemarker-accessible maps
   9256: Merged V2.2 to V2.9 
      9100: Merged V2.1 to V2.2 
         8728 <Not required>: Latest AMP changes for AR-2212 
         8731: Faster content store cleaner 
         8738: Fix for AWC-1930 - support simple bind when building DNs that contain a comma
         8835: Fix regression issue as discussed in ACT-2019
         8861: Fix WCM-1158 
         8866: Fixed AR-2272: Module Management Tool distribution is broken 
         8872: Fixed distribution of benchmark executable jar after EHCache upgrade 
         8933: Fix for ACT-2469


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@9260 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley 2008-05-23 21:41:53 +00:00
parent bb3c776130
commit db95d287ee
12 changed files with 515 additions and 37 deletions

View File

@ -62,6 +62,12 @@
<property name="transactionService">
<ref bean="transactionService" />
</property>
<property name="escapeCommasInBind">
<value>${ldap.authentication.escapeCommasInBind}</value>
</property>
<property name="escapeCommasInUid">
<value>${ldap.authentication.escapeCommasInUid}</value>
</property>
</bean>
<!--

View File

@ -25,3 +25,13 @@ ldap.authentication.java.naming.security.principal=reader
# The password for the default principal (only used for LDAP sync)
ldap.authentication.java.naming.security.credentials=secret
# Escape commas entered by the user at bind time
# Useful when using simple authentication and the CN is part of the DN and contains commas
ldap.authentication.escapeCommasInBind=false
# Escape commas entered by the user when setting the authenticated user
# Useful when using simple authentication and the CN is part of the DN and contains commas, and the escaped \, is
# pulled in as part of an LDAP sync
# If this option is set to true it will break the default home folder provider, as space names cannot contain \
ldap.authentication.escapeCommasInUid=false
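
For illustration, a minimal sketch of what the two flags do, assuming a hypothetical userNameFormat of cn=%s,ou=People,dc=example,dc=com; the user name, DN, and class name below are made up, and the escaping mirrors the code added to LDAPAuthenticationComponentImpl later in this commit.

// Illustration only: the user name, DN format, and class name below are made up.
public class CommaEscapeExample
{
    public static void main(String[] args)
    {
        String userNameFormat = "cn=%s,ou=People,dc=example,dc=com";   // hypothetical format
        String userName = "Smith, John";                               // CN containing a comma

        // With ldap.authentication.escapeCommasInBind=true the comma is escaped before
        // the bind DN is built, so the directory does not read it as an RDN separator.
        System.out.println(String.format(userNameFormat, escapeCommas(userName)));
        // prints: cn=Smith\, John,ou=People,dc=example,dc=com

        // With ldap.authentication.escapeCommasInUid=true the same escaping is applied
        // to the name recorded as the authenticated user.
        System.out.println(escapeCommas(userName));
        // prints: Smith\, John
    }

    private static String escapeCommas(String userName)
    {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < userName.length(); i++)
        {
            char c = userName.charAt(i);
            if (c == ',')
            {
                sb.append('\\');
            }
            sb.append(c);
        }
        return sb.toString();
    }
}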

View File

@ -866,7 +866,7 @@
<value>getIndirectionPath</value>
<value>getHistory</value>
<value>getCommonAncestor</value>
<value>getLayereingInfo</value>
<value>getLayeringInfo</value>
<value>getNodeProperty</value>
<value>getNodeProperties</value>
<value>getStoreProperty</value>

View File

@ -259,7 +259,13 @@ public class ContentStoreCleaner
{
logger.debug("Starting content store cleanup.");
}
// Repeat attempts six times, waiting 10 minutes between attempts
executeInternal(0, 6, 600000);
}
public void executeInternal(int currentAttempt, int maxAttempts, long waitTime)
{
currentAttempt++;
// This handler removes the URLs from all the stores
final org.alfresco.repo.domain.ContentUrlDAO.ContentUrlHandler handler = new org.alfresco.repo.domain.ContentUrlDAO.ContentUrlHandler()
{
@ -317,6 +323,22 @@ public class ContentStoreCleaner
logger.debug(" Content store cleanup aborted.");
}
}
catch (Throwable e)
{
if (currentAttempt >= maxAttempts)
{
throw new AlfrescoRuntimeException("Failed to initiate content store clean", e);
}
if (RetryingTransactionHelper.extractRetryCause(e) != null)
{
// There are grounds for waiting and retrying
synchronized(this)
{
try { this.wait(waitTime); } catch (InterruptedException ee) {}
}
executeInternal(currentAttempt, maxAttempts, waitTime);
}
}
}
/**
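
The change above retries the cleanup by calling executeInternal recursively, waiting between attempts when the failure looks retryable. A minimal standalone sketch of that retry-and-wait pattern, with a plain Runnable standing in for the cleaner's transactional work (the class and method names here are illustrative, not from the repository):

// Illustration only: a generic retry-with-wait loop, not the Alfresco implementation.
public class RetryWithWait
{
    public static void runWithRetries(Runnable task, int maxAttempts, long waitMillis)
    {
        for (int attempt = 1; attempt <= maxAttempts; attempt++)
        {
            try
            {
                task.run();
                return;                                    // success, stop retrying
            }
            catch (RuntimeException e)
            {
                if (attempt >= maxAttempts)
                {
                    throw e;                               // out of attempts, rethrow
                }
                try
                {
                    Thread.sleep(waitMillis);              // wait before the next attempt
                }
                catch (InterruptedException ie)
                {
                    Thread.currentThread().interrupt();
                    throw e;
                }
            }
        }
    }

    public static void main(String[] args)
    {
        // The cleaner uses six attempts and a 600000 ms (ten minute) wait; shortened here.
        runWithRetries(new Runnable()
        {
            public void run()
            {
                System.out.println("cleanup attempt");
            }
        }, 6, 1000L);
    }
}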

View File

@ -0,0 +1,349 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.content.cleanup;
import java.io.File;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Date;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.EmptyContentReader;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.repo.content.filestore.FileContentWriter;
import org.alfresco.repo.domain.Node;
import org.alfresco.repo.domain.PropertyValue;
import org.alfresco.repo.domain.Store;
import org.alfresco.repo.domain.hibernate.NodeImpl;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
import org.alfresco.repo.transaction.SingleEntryTransactionResourceInterceptor;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.tools.Repository;
import org.alfresco.tools.ToolException;
import org.alfresco.util.GUID;
import org.alfresco.util.TempFileProvider;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.lang.mutable.MutableInt;
import org.hibernate.SessionFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
* Loads the repository up with orphaned content and then runs the cleaner.
* <p>
* A null content store produces fictitious content URLs. The DB is loaded with fictitious URLs.
* The process is kicked off.
*
* @author Derek Hulley
* @since 2.1.3
*/
public class ContentStoreCleanerScalabilityRunner extends Repository
{
private VmShutdownListener vmShutdownListener = new VmShutdownListener("ContentStoreCleanerScalabilityRunner");
private ApplicationContext ctx;
private SingleEntryTransactionResourceInterceptor txnResourceInterceptor;
private HibernateHelper hibernateHelper;
private TransactionService transactionService;
private NodeDaoService nodeDaoService;
private DictionaryService dictionaryService;
private ContentStore contentStore;
private ContentStoreCleaner cleaner;
/**
* Do the load and cleanup.
*/
public static void main(String[] args)
{
new ContentStoreCleanerScalabilityRunner().start(args);
}
@Override
protected synchronized int execute() throws ToolException
{
ctx = super.getApplicationContext();
txnResourceInterceptor = (SingleEntryTransactionResourceInterceptor) ctx.getBean("sessionSizeResourceInterceptor");
SessionFactory sessionFactory = (SessionFactory) ctx.getBean("sessionFactory");
hibernateHelper = new HibernateHelper();
hibernateHelper.setSessionFactory(sessionFactory);
transactionService = (TransactionService) ctx.getBean("TransactionService");
nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
dictionaryService = (DictionaryService) ctx.getBean("dictionaryService");
int orphanCount = 100000;
contentStore = new NullContentStore(orphanCount);
loadData(orphanCount);
long beforeIterate = System.currentTimeMillis();
// iterateOverProperties();
long afterIterate = System.currentTimeMillis();
double aveIterate = (double) (afterIterate - beforeIterate) / (double) orphanCount / 1000D;
System.out.println("Ready to clean store: " + contentStore);
synchronized(this)
{
try { this.wait(10000L); } catch (InterruptedException e) {}
}
long beforeClean = System.currentTimeMillis();
clean();
long afterClean = System.currentTimeMillis();
double aveClean = (double) (afterClean - beforeClean) / (double) orphanCount / 1000D;
System.out.println();
System.out.println(String.format("Iterating took %3f per 1000 content URLs in DB", aveIterate));
System.out.println(String.format("Cleaning took %3f per 1000 content URLs in DB", aveClean));
return 0;
}
private void loadData(final int maxCount)
{
final MutableInt doneCount = new MutableInt(0);
// Batches of 1000 objects
RetryingTransactionCallback<Integer> makeNodesCallback = new RetryingTransactionCallback<Integer>()
{
public Integer execute() throws Throwable
{
for (int i = 0; i < 1000; i++)
{
// We don't need to write anything
String contentUrl = FileContentStore.createNewFileStoreUrl();
ContentData contentData = new ContentData(contentUrl, MimetypeMap.MIMETYPE_TEXT_PLAIN, 10, "UTF-8");
hibernateHelper.makeNode(contentData);
int count = doneCount.intValue();
count++;
doneCount.setValue(count);
// Do some reporting
if (count % 1000 == 0)
{
System.out.println(String.format(" " + (new Date()) + "Total created: %6d", count));
}
// Double check for shutdown
if (vmShutdownListener.isVmShuttingDown())
{
break;
}
}
return maxCount;
}
};
int repetitions = (int) Math.floor((double)maxCount / 1000.0);
for (int i = 0; i < repetitions; i++)
{
transactionService.getRetryingTransactionHelper().doInTransaction(makeNodesCallback);
}
}
private void iterateOverProperties()
{
final NodePropertyHandler nodePropertyHandler = new NodePropertyHandler()
{
int count = 0;
public void handle(Node node, Serializable value)
{
count++;
if (count % 1000 == 0)
{
System.out.println(" " + (new Date()) + "Iterated over " + count + " content items");
}
if (vmShutdownListener.isVmShuttingDown())
{
throw new RuntimeException("VM Shut down");
}
}
};
final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
// execute in READ-WRITE txn
RetryingTransactionCallback<Object> getUrlsCallback = new RetryingTransactionCallback<Object>()
{
public Object execute() throws Exception
{
nodeDaoService.getPropertyValuesByActualType(contentDataType, nodePropertyHandler);
return null;
};
};
transactionService.getRetryingTransactionHelper().doInTransaction(getUrlsCallback);
}
private void clean()
{
ContentStoreCleanerListener listener = new ContentStoreCleanerListener()
{
private int count = 0;
public void beforeDelete(ContentReader reader) throws ContentIOException
{
count++;
if (count % 1000 == 0)
{
System.out.println(String.format(" Total deleted: %6d", count));
}
}
};
// We use the default cleaner, but fix it up a bit
cleaner = (ContentStoreCleaner) ctx.getBean("contentStoreCleaner");
cleaner.setListeners(Collections.singletonList(listener));
cleaner.setProtectDays(0);
cleaner.setStores(Collections.singletonList(contentStore));
// The cleaner has its own txns
cleaner.execute();
}
private class NullContentStore extends AbstractContentStore
{
private ThreadLocal<File> hammeredFile;
private int count;
private int deletedCount;
private NullContentStore(int count)
{
hammeredFile = new ThreadLocal<File>();
this.count = count;
}
public boolean isWriteSupported()
{
return true;
}
/**
* Returns a writer to a thread-unique file. It's always the same file per thread so you must
* use and close the writer before getting another.
*/
@Override
protected ContentWriter getWriterInternal(ContentReader existingContentReader, String newContentUrl)
{
File file = hammeredFile.get();
if (file == null)
{
file = TempFileProvider.createTempFile("NullContentStore", ".txt");
hammeredFile.set(file);
}
return new FileContentWriter(file);
}
@Override
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
// Make it up
for (int i = 0; i < count; i++)
{
String contentUrl = FileContentStore.createNewFileStoreUrl() + "-imaginary";
handler.handle(contentUrl);
}
}
public ContentReader getReader(String contentUrl)
{
File file = hammeredFile.get();
if (file == null)
{
return new EmptyContentReader(contentUrl);
}
else
{
return new FileContentReader(file);
}
}
@Override
public boolean delete(String contentUrl)
{
deletedCount++;
if (deletedCount % 1000 == 0)
{
System.out.println(String.format(" Deleted %6d files", deletedCount));
}
return true;
}
}
private class HibernateHelper extends HibernateDaoSupport
{
private Method methodMakeNode;
private QName dataTypeDefContent;
private QName contentQName;
public HibernateHelper()
{
Class<HibernateHelper> clazz = HibernateHelper.class;
try
{
methodMakeNode = clazz.getMethod("makeNode", new Class[] {ContentData.class});
}
catch (NoSuchMethodException e)
{
throw new RuntimeException("Failed to get methods");
}
dataTypeDefContent = DataTypeDefinition.CONTENT;
contentQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "realContent");
}
/**
* Creates a node with two properties
*/
public void makeNode(ContentData contentData)
{
throw new UnsupportedOperationException("Fix this method up");
// Store store = nodeDaoService.getStore(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
// Node node = new NodeImpl();
// // set other required properties
// node.setStore(store);
// node.setUuid(GUID.generate());
// node.setTypeQName(ContentModel.TYPE_CONTENT);
//
// PropertyValue propertyValue = new PropertyValue(dataTypeDefContent, contentData);
// node.getProperties().put(contentQName, propertyValue);
// // persist the node
// getHibernateTemplate().save(node);
//
// txnResourceInterceptor.performManualCheck(methodMakeNode, 10);
}
}
}

View File

@ -148,6 +148,34 @@ public class ContentUrlDAOTest extends TestCase
}
}
public void testDeleteContentUrlSpeed() throws Throwable
{
UserTransaction txn = transactionService.getUserTransaction();
try
{
txn.begin();
final Set<String> urls = makeUrls(1000);
// Delete them
long startTimeNs = System.nanoTime();
for (String url : urls)
{
dao.deleteContentUrl(url);
}
long endTimeNs = System.nanoTime();
double aveTimeMs = (double) (endTimeNs - startTimeNs) / 1000000D / 1000D;
System.out.println("Average delete is " + aveTimeMs + "ms per content URL");
txn.commit();
}
catch (Throwable e)
{
try { txn.rollback(); } catch (Throwable ee) {}
throw e;
}
}
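
For reference, the average printed by the test works out as the elapsed nanoseconds divided by 1e6 (to get milliseconds) and then by the 1000 URLs deleted. A minimal sketch of the same arithmetic with a made-up elapsed time:

// Illustration only: the elapsed time and class name below are made up.
public class DeleteTimingExample
{
    public static void main(String[] args)
    {
        long startTimeNs = 0L;
        long endTimeNs = 2500000000L;   // pretend 1000 deletes took 2.5 seconds in total
        double aveTimeMs = (double) (endTimeNs - startTimeNs) / 1000000D / 1000D;
        System.out.println("Average delete is " + aveTimeMs + "ms per content URL");   // 2.5
    }
}
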
public void testDeleteContentUrls() throws Throwable
{
UserTransaction txn = transactionService.getUserTransaction();

View File

@ -4,9 +4,10 @@ import java.util.Set;
import org.alfresco.repo.domain.ContentUrl;
import org.alfresco.repo.domain.ContentUrlDAO;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.CacheMode;
import org.hibernate.FlushMode;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
@ -28,10 +29,29 @@ public class HibernateContentUrlDAOImpl extends HibernateDaoSupport implements C
private static final String UPDATE_DELETE_IN_LIST = "contentUrl.DeleteInList";
private static final String UPDATE_DELETE_ALL = "contentUrl.DeleteAll";
/** Txn resource key to check for required flushes */
private static final String KEY_REQUIRES_FLUSH = "HibernateContentUrlDAOImpl.requiresFlush";
private static Log logger = LogFactory.getLog(HibernateContentUrlDAOImpl.class);
private void flushIfRequired()
{
Boolean requiresFlush = (Boolean) AlfrescoTransactionSupport.getResource(KEY_REQUIRES_FLUSH);
if (requiresFlush == null)
{
requiresFlush = Boolean.FALSE;
AlfrescoTransactionSupport.bindResource(KEY_REQUIRES_FLUSH, Boolean.FALSE);
}
else if (requiresFlush.booleanValue() == true)
{
getSession().flush();
AlfrescoTransactionSupport.bindResource(KEY_REQUIRES_FLUSH, Boolean.FALSE);
}
}
public ContentUrl createContentUrl(String contentUrl)
{
AlfrescoTransactionSupport.bindResource(KEY_REQUIRES_FLUSH, Boolean.TRUE);
ContentUrl entity = new ContentUrlImpl();
entity.setContentUrl(contentUrl);
getSession().save(entity);
@ -40,13 +60,16 @@ public class HibernateContentUrlDAOImpl extends HibernateDaoSupport implements C
public void getAllContentUrls(final ContentUrlHandler handler)
{
// Force a flush if there are pending changes
flushIfRequired();
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session
.getNamedQuery(HibernateContentUrlDAOImpl.QUERY_GET_ALL)
.setCacheMode(CacheMode.IGNORE);
;
return query.scroll(ScrollMode.FORWARD_ONLY);
}
};
@ -60,20 +83,21 @@ public class HibernateContentUrlDAOImpl extends HibernateDaoSupport implements C
public void deleteContentUrl(final String contentUrl)
{
// Force a flush if there are pending changes
flushIfRequired();
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
session.flush();
Query query = session
.getNamedQuery(HibernateContentUrlDAOImpl.UPDATE_DELETE_BY_URL)
.setCacheMode(CacheMode.IGNORE)
.setFlushMode(FlushMode.MANUAL)
.setString("contentUrl", contentUrl);
return (Integer) query.executeUpdate();
}
};
Integer deletedCount = (Integer) getHibernateTemplate().execute(callback);
int entityCount = getSession().getStatistics().getEntityCount();
if (logger.isDebugEnabled())
{
logger.debug("Deleted " + deletedCount + " ContentUrl entities.");
@ -82,14 +106,16 @@ public class HibernateContentUrlDAOImpl extends HibernateDaoSupport implements C
public void deleteContentUrls(final Set<String> contentUrls)
{
// Force a flush if there are pending changes
flushIfRequired();
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
session.flush();
Query query = session
.getNamedQuery(HibernateContentUrlDAOImpl.UPDATE_DELETE_IN_LIST)
.setCacheMode(CacheMode.IGNORE)
.setFlushMode(FlushMode.MANUAL)
.setParameterList("contentUrls", contentUrls, TypeFactory.basic("string"));
return (Integer) query.executeUpdate();
}
@ -103,6 +129,9 @@ public class HibernateContentUrlDAOImpl extends HibernateDaoSupport implements C
public void deleteAllContentUrls()
{
// Force a flush if there are pending changes
flushIfRequired();
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
@ -110,7 +139,8 @@ public class HibernateContentUrlDAOImpl extends HibernateDaoSupport implements C
session.flush();
Query query = session
.getNamedQuery(HibernateContentUrlDAOImpl.UPDATE_DELETE_ALL)
.setCacheMode(CacheMode.IGNORE);
.setFlushMode(FlushMode.MANUAL)
;
return (Integer) query.executeUpdate();
}
};
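
The changes above bind a transaction-scoped flag whenever a ContentUrl entity is created and flush the Hibernate session lazily, only when a later bulk read or delete actually needs to see the pending inserts. A minimal sketch of the same idea, using a ThreadLocal flag as a stand-in for AlfrescoTransactionSupport (a simplification: the real flag is transaction-bound, not thread-bound, and the flush goes to the Hibernate session):

// Illustration only: a ThreadLocal flag stands in for the transaction-bound resource,
// and flushSession() stands in for getSession().flush().
public class LazyFlushExample
{
    private static final ThreadLocal<Boolean> REQUIRES_FLUSH = new ThreadLocal<Boolean>();

    public void recordPendingWrite()
    {
        // Equivalent of createContentUrl binding KEY_REQUIRES_FLUSH to TRUE
        REQUIRES_FLUSH.set(Boolean.TRUE);
    }

    public void flushIfRequired()
    {
        // Flush only when a write is actually pending, then clear the flag
        if (Boolean.TRUE.equals(REQUIRES_FLUSH.get()))
        {
            flushSession();
            REQUIRES_FLUSH.set(Boolean.FALSE);
        }
    }

    private void flushSession()
    {
        System.out.println("session.flush() would run here");
    }
}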

View File

@ -175,7 +175,7 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
{
if (logger.isDebugEnabled())
{
logger.error(stats);
logger.debug(stats);
}
Set<EntityKey> keys = new HashSet<EntityKey>((Set<EntityKey>)stats.getEntityKeys());
for (EntityKey key : keys)

View File

@ -152,11 +152,11 @@ public class ValueConverter
// recursively call this method to convert the unwrapped value
value = convertValueForRepo((Serializable)((Wrapper)value).unwrap());
}
else if (value instanceof ScriptableObject)
else if (value instanceof Scriptable)
{
// a scriptable object will probably indicate a multi-value property
// set using a JavaScript Array object
ScriptableObject values = (ScriptableObject)value;
Scriptable values = (Scriptable)value;
if (value instanceof IdScriptableObject)
{
@ -230,7 +230,7 @@ public class ValueConverter
}
else
{
// convert JavaScript map to values to a Map of Serializable objects
// convert Scriptable object of values to a Map of Serializable objects
Object[] propIds = values.getIds();
Map<String, Serializable> propValues = new HashMap<String, Serializable>(propIds.length);
for (int i=0; i<propIds.length; i++)
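
The widened check above treats any Rhino Scriptable with string ids as a map, not just ScriptableObject instances. A minimal standalone sketch of that id-based conversion, assuming Rhino's org.mozilla.javascript API; the class and method names are illustrative:

// Illustration only: class and method names are made up; requires Rhino on the classpath.
import java.util.HashMap;
import java.util.Map;
import org.mozilla.javascript.Scriptable;

public class ScriptableToMap
{
    public static Map<String, Object> toMap(Scriptable values)
    {
        Object[] propIds = values.getIds();
        Map<String, Object> propValues = new HashMap<String, Object>(propIds.length);
        for (int i = 0; i < propIds.length; i++)
        {
            // String ids behave like map keys; integer ids would indicate an array instead
            if (propIds[i] instanceof String)
            {
                String propId = (String) propIds[i];
                // Nested Rhino values would still need converting
                // (the real code recurses via convertValueForRepo)
                propValues.put(propId, values.get(propId, values));
            }
        }
        return propValues;
    }
}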

View File

@ -37,6 +37,9 @@ import org.alfresco.repo.security.authentication.AuthenticationException;
*/
public class LDAPAuthenticationComponentImpl extends AbstractAuthenticationComponent
{
private boolean escapeCommasInBind = false;
private boolean escapeCommasInUid = false;
private String userNameFormat;
@ -47,18 +50,26 @@ public class LDAPAuthenticationComponentImpl extends AbstractAuthenticationCompo
super();
}
public void setLDAPInitialDirContextFactory(LDAPInitialDirContextFactory ldapInitialDirContextFactory)
{
this.ldapInitialContextFactory = ldapInitialDirContextFactory;
}
public void setUserNameFormat(String userNameFormat)
{
this.userNameFormat = userNameFormat;
}
public void setEscapeCommasInBind(boolean escapeCommasInBind)
{
this.escapeCommasInBind = escapeCommasInBind;
}
public void setEscapeCommasInUid(boolean escapeCommasInUid)
{
this.escapeCommasInUid = escapeCommasInUid;
}
/**
* Implement the authentication method
*/
@ -67,16 +78,16 @@ public class LDAPAuthenticationComponentImpl extends AbstractAuthenticationCompo
InitialDirContext ctx = null;
try
{
ctx = ldapInitialContextFactory.getInitialDirContext(String.format(userNameFormat, new Object[]{userName}), new String(password));
ctx = ldapInitialContextFactory.getInitialDirContext(String.format(userNameFormat, new Object[] { escapeUserName(userName, escapeCommasInBind) }), new String(password));
// Authentication has been successful.
// Set the current user, they are now authenticated.
setCurrentUser(userName);
setCurrentUser(escapeUserName(userName, escapeCommasInUid));
}
finally
{
if(ctx != null)
if (ctx != null)
{
try
{
@ -91,6 +102,29 @@ public class LDAPAuthenticationComponentImpl extends AbstractAuthenticationCompo
}
}
private static String escapeUserName(String userName, boolean escape)
{
if (escape)
{
StringBuffer sb = new StringBuffer();
for (int i = 0; i < userName.length(); i++)
{
char c = userName.charAt(i);
if (c == ',')
{
sb.append('\\');
}
sb.append(c);
}
return sb.toString();
}
else
{
return userName;
}
}
@Override
protected boolean implementationAllowsGuestLogin()
@ -102,13 +136,13 @@ public class LDAPAuthenticationComponentImpl extends AbstractAuthenticationCompo
return true;
}
catch(Exception e)
catch (Exception e)
{
return false;
}
finally
{
if(ctx != null)
if (ctx != null)
{
try
{
@ -122,5 +156,4 @@ public class LDAPAuthenticationComponentImpl extends AbstractAuthenticationCompo
}
}
}

View File

@ -43,7 +43,7 @@ import org.apache.commons.logging.LogFactory;
*/
public class QNameMap<K,V> implements Map, Cloneable, Serializable
{
private static final long serialVersionUID = 2077228225832792605L;
private static final long serialVersionUID = -6578946123712939601L;
protected static Log logger = LogFactory.getLog(QNameMap.class);
protected Map<String, Object> contents = new HashMap<String, Object>(16, 1.0f);

View File

@ -29,16 +29,16 @@ package org.alfresco.tools;
*
* @author David Caruana
*/
/*package*/ class ToolException extends RuntimeException
public class ToolException extends RuntimeException
{
private static final long serialVersionUID = 3257008761007847733L;
/*package*/ ToolException(String msg)
public ToolException(String msg)
{
super(msg);
}
/*package*/ ToolException(String msg, Throwable cause)
public ToolException(String msg, Throwable cause)
{
super(msg, cause);
}