Merged V3.0 to HEAD

  12185: Fix 3.0 SP1 installation on non-Oracle databases. Removed the creation of indexes in AlfrescoPostCreate-2.2-MappedFKIndexes.sql that were already created in AlfrescoPostCreate-2.2-Extra.sql.
  12186: Performance improvements to HibernateNodeDaoServiceImpl.
  12188: Multi-user tests: enable graceful web script recovery on optimistic locking failure (...)
  12191: Improve JavaScript execution performance in Web Scripts and improve the presentation of errors (...) thrown by JavaScript.
  12192: Share performance improvements: stop AbstractFeedGenerator from 'choking' the repository with too many web script requests.
  12193: Multi-user testing: don't suppress all exceptions during Wiki Move.
  12194: Multi-user testing: don't suppress all runtime exceptions in the script Site node object.
  12195: Multi-user testing: convert the User bean to use a retrying transaction so that optimistic locking failures are handled.
  12196: Multi-user testing: configuration changes to support concurrent access by 20 users.


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@12522 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Jan Vonka
2008-12-19 17:44:01 +00:00
parent de05f999e4
commit 08676ac665
14 changed files with 287 additions and 201 deletions

View File

@@ -241,6 +241,9 @@
<property name="defaultAutoCommit" > <property name="defaultAutoCommit" >
<value>false</value> <value>false</value>
</property> </property>
<property name="defaultTransactionIsolation" >
<value>${db.txn.isolation}</value>
</property>
</bean> </bean>
<!-- Characterset decoder --> <!-- Characterset decoder -->

View File

@@ -28,8 +28,6 @@ CREATE INDEX fk_alf_autha_ali ON alf_authority_alias (alias_id);
 CREATE INDEX fk_alf_autha_aut ON alf_authority_alias (auth_id);
 CREATE INDEX fk_alf_cass_pnode ON alf_child_assoc (parent_node_id);
-CREATE INDEX fk_alf_cass_tqn ON alf_child_assoc (type_qname_id);
-CREATE INDEX fk_alf_cass_qnns ON alf_child_assoc (qname_ns_id);
 CREATE INDEX fk_alf_cass_cnode ON alf_child_assoc (child_node_id);
 -- alf_global_attributes.attribute is declared unique. Indexes may automatically have been created.
@@ -42,20 +40,16 @@ CREATE INDEX fk_alf_matt_matt ON alf_map_attribute_entries (map_id);
 CREATE INDEX fk_alf_matt_att ON alf_map_attribute_entries (attribute_id);
 CREATE INDEX fk_alf_node_acl ON alf_node (acl_id);
-CREATE INDEX fk_alf_node_tqn ON alf_node (type_qname_id);
 CREATE INDEX fk_alf_node_txn ON alf_node (transaction_id);
 CREATE INDEX fk_alf_node_store ON alf_node (store_id);
 CREATE INDEX fk_alf_nasp_n ON alf_node_aspects (node_id);
 CREATE INDEX fk_alf_nass_snode ON alf_node_assoc (source_node_id);
-CREATE INDEX fk_alf_nass_tqn ON alf_node_assoc (type_qname_id);
 CREATE INDEX fk_alf_nass_tnode ON alf_node_assoc (target_node_id);
 CREATE INDEX fk_alf_nprop_n ON alf_node_properties (node_id);
-CREATE INDEX fk_alf_perm_tqn ON alf_permission (type_qname_id);
 CREATE INDEX fk_alf_qname_ns ON alf_qname (ns_id);
 CREATE INDEX fk_alf_store_root ON alf_store (root_node_id);
@@ -80,7 +74,6 @@ CREATE INDEX fk_avm_nprop_n ON avm_node_properties (node_id);
 CREATE INDEX fk_avm_n_acl ON avm_nodes (acl_id);
 CREATE INDEX fk_avm_n_store ON avm_nodes (store_new_id);
-CREATE INDEX fk_avm_sprop_qname ON avm_store_properties (qname_id);
 CREATE INDEX fk_avm_sprop_store ON avm_store_properties (avm_store_id);
 CREATE INDEX fk_avm_s_acl ON avm_stores (acl_id);

View File

@@ -8,7 +8,7 @@ server.transaction.mode.readOnly=PROPAGATION_REQUIRED, readOnly
 server.transaction.mode.default=PROPAGATION_REQUIRED
 server.transaction.allow-writes=true
-server.transaction.max-retries=20
+server.transaction.max-retries=40
 server.transaction.min-retry-wait-ms=100
 server.transaction.max-retry-wait-ms=2000
 server.transaction.wait-increment-ms=100
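
These server.transaction.* settings drive the repository's retrying transaction behaviour referred to by r12188 and r12195 in the merged list: a transaction that fails with a retryable error (for example an optimistic locking conflict) is re-run up to max-retries times, now 40, with a wait that starts at min-retry-wait-ms and grows by wait-increment-ms up to max-retry-wait-ms. A minimal usage sketch, assuming the 3.0-era RetryingTransactionHelper API obtained from TransactionService; the surrounding class and the callback body are illustrative only.

    import org.alfresco.repo.transaction.RetryingTransactionHelper;
    import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
    import org.alfresco.service.transaction.TransactionService;

    public class RetryingUpdateExample
    {
        private final TransactionService transactionService;

        public RetryingUpdateExample(TransactionService transactionService)
        {
            this.transactionService = transactionService;
        }

        public void updateWithRetry()
        {
            RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
            // The callback is re-run from scratch if the transaction fails with a retryable
            // exception (optimistic lock, deadlock, ...), up to server.transaction.max-retries times
            txnHelper.doInTransaction(new RetryingTransactionCallback<Void>()
            {
                public Void execute() throws Throwable
                {
                    // ... repository work goes here (illustrative body) ...
                    return null;
                }
            }, false, true); // readOnly = false, requiresNew = true
        }
    }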

View File

@@ -52,6 +52,7 @@
 #
 #db.driver=com.microsoft.sqlserver.jdbc.SQLServerDriver
 #db.url=jdbc:sqlserver://localhost:1433;databaseName=alfresco
+#db.txn.isolation=4096
 # The well known RMI registry port is defined in the alfresco-shared.properties file
 # alfresco.rmi.services.port=50500

View File

@@ -160,7 +160,8 @@ db.url=jdbc:mysql:///${db.name}
 db.username=alfresco
 db.password=alfresco
 db.pool.initial=10
-db.pool.max=20
+db.pool.max=40
+db.txn.isolation=-1
 # Email configuration
 mail.host=
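
The new db.txn.isolation placeholder is wired into the pool's defaultTransactionIsolation property (first file above). The value is a java.sql.Connection isolation constant: -1 is the shipped default and means "leave the driver's default isolation untouched", while the commented-out 4096 in the SQL Server sample is that driver's vendor-specific snapshot isolation level. A rough equivalent in code, assuming the commons-dbcp BasicDataSource that backs this bean; the connection details are copied from the sample settings above and are illustrative.

    import java.sql.Connection;
    import org.apache.commons.dbcp.BasicDataSource;

    public class IsolationConfigExample
    {
        public static BasicDataSource createPool(int txnIsolation)
        {
            BasicDataSource ds = new BasicDataSource();
            ds.setDriverClassName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
            ds.setUrl("jdbc:sqlserver://localhost:1433;databaseName=alfresco");
            ds.setUsername("alfresco");
            ds.setPassword("alfresco");
            ds.setDefaultAutoCommit(false);          // matches defaultAutoCommit=false above
            ds.setMaxActive(40);                     // mirrors db.pool.max=40
            if (txnIsolation != -1)
            {
                // -1 keeps the driver default; otherwise the raw constant is passed through,
                // e.g. Connection.TRANSACTION_READ_COMMITTED (2) or SQL Server's 4096 (snapshot)
                ds.setDefaultTransactionIsolation(txnIsolation);
            }
            return ds;
        }

        public static void main(String[] args)
        {
            BasicDataSource pool = createPool(Connection.TRANSACTION_READ_COMMITTED);
            System.out.println(pool.getDefaultTransactionIsolation());
        }
    }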

View File

@@ -52,7 +52,7 @@ public abstract class AbstractFeedGenerator implements FeedGenerator
     private RepoCtx ctx = null;
-    private boolean busy;
+    private volatile boolean busy;
     public void setPostDaoService(ActivityPostDaoService postDaoService)
     {
@@ -124,15 +124,13 @@ public abstract class AbstractFeedGenerator implements FeedGenerator
             return;
         }
-        checkProperties();
+        busy = true;
         try
         {
-            // run at least one job cycle
-            boolean moreWork = true;
-            while (moreWork)
-            {
-                moreWork = generate();
-            }
+            checkProperties();
+            // run one job cycle
+            generate();
         }
         catch (Throwable e)
         {
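
The hunk above replaces the "keep generating while there is more work" loop with a single generate() pass per scheduler tick, and makes the busy guard volatile so a tick running on a different scheduler thread reliably sees it and backs off instead of piling more web script requests onto the repository. A stripped-down sketch of the same guard, with illustrative names rather than the Alfresco ones:

    public class SingleCycleJob
    {
        // volatile: the flag is read and written by different scheduler threads,
        // so its visibility must not depend on any other synchronization
        private volatile boolean busy;

        public void execute()
        {
            if (busy)
            {
                return;             // a previous cycle is still running - skip this tick
            }
            busy = true;
            try
            {
                runOneCycle();      // one bounded unit of work, then hand control back to the scheduler
            }
            catch (Throwable e)
            {
                // log and swallow so the scheduler keeps ticking (illustrative)
            }
            finally
            {
                busy = false;
            }
        }

        private void runOneCycle()
        {
            // ... generate one batch of feed entries (illustrative) ...
        }
    }

The check-then-set on busy is not atomic, so two ticks could in principle overlap; an AtomicBoolean.compareAndSet would close that gap if it mattered.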

View File

@@ -38,12 +38,14 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 import org.alfresco.repo.activities.feed.control.FeedControlDAO;
 import org.alfresco.repo.activities.post.ActivityPostDAO;
@@ -106,6 +108,10 @@ public abstract class FeedTaskProcessor
         Configuration cfg = getFreemarkerConfiguration(ctx);
         Map<String, List<String>> activityTemplates = new HashMap<String, List<String>>(10);
+        Map<String, Set<String>> siteConnectedUsers = new TreeMap<String, Set<String>>();
+        Map<String, Template> templateCache = new TreeMap<String, Template>();
         // for each activity post ...
         for (ActivityPostDAO activityPost : activityPosts)
@@ -192,28 +198,37 @@ public abstract class FeedTaskProcessor
model.put("xmldate", new ISO8601DateFormatMethod()); model.put("xmldate", new ISO8601DateFormatMethod());
model.put("repoEndPoint", ctx.getRepoEndPoint()); model.put("repoEndPoint", ctx.getRepoEndPoint());
Set<String> connectedUsers = null; // Get the members of this site
if ((activityPost.getSiteNetwork() == null) || (activityPost.getSiteNetwork().length() == 0)) String thisSite = activityPost.getSiteNetwork();
// Save hammering the repository by reusing cached site members
Set<String> connectedUsers = siteConnectedUsers.get(thisSite);
if (connectedUsers == null)
{ {
connectedUsers = new HashSet<String>(1); if ((thisSite == null) || (thisSite.length() == 0))
}
else
{
try
{ {
// Repository callback to get site members connectedUsers = Collections.singleton(""); // add empty posting userid - to represent site feed !
connectedUsers = getSiteMembers(ctx, activityPost.getSiteNetwork());
} }
catch(Exception e) else
{ {
logger.error(">>> Skipping activity post " + activityPost.getId() + " since failed to get site members: " + e); try
updatePostStatus(activityPost.getId(), ActivityPostDAO.STATUS.ERROR); {
continue; // Repository callback to get site members
connectedUsers = getSiteMembers(ctx, thisSite);
connectedUsers.add(""); // add empty posting userid - to represent site feed !
// Cache them for future use in this same invocation
siteConnectedUsers.put(thisSite, connectedUsers);
}
catch(Exception e)
{
logger.error(">>> Skipping activity post " + activityPost.getId() + " since failed to get site members: " + e);
updatePostStatus(activityPost.getId(), ActivityPostDAO.STATUS.ERROR);
continue;
}
} }
} }
connectedUsers.add(""); // add empty posting userid - to represent site feed !
try try
{ {
startTransaction(); startTransaction();
@@ -272,12 +287,12 @@ public abstract class FeedTaskProcessor
model.put("activityData", activityPost.getActivityData()); model.put("activityData", activityPost.getActivityData());
} }
String activitySummary = processFreemarker(fmTemplate, cfg, model); String activitySummary = processFreemarker(templateCache, fmTemplate, cfg, model);
if (! activitySummary.equals("")) if (! activitySummary.equals(""))
{ {
feed.setActivitySummary(activitySummary); feed.setActivitySummary(activitySummary);
feed.setActivitySummaryFormat(formatFound); feed.setActivitySummaryFormat(formatFound);
feed.setSiteNetwork(activityPost.getSiteNetwork()); feed.setSiteNetwork(thisSite);
feed.setAppTool(activityPost.getAppTool()); feed.setAppTool(activityPost.getAppTool());
feed.setPostDate(activityPost.getPostDate()); feed.setPostDate(activityPost.getPostDate());
feed.setPostId(activityPost.getId()); feed.setPostId(activityPost.getId());
@@ -502,9 +517,15 @@ public abstract class FeedTaskProcessor
         return cfg;
     }
-    protected String processFreemarker(String fmTemplate, Configuration cfg, Map<String, Object> model) throws IOException, TemplateException, Exception
+    protected String processFreemarker(Map<String, Template> templateCache, String fmTemplate, Configuration cfg, Map<String, Object> model) throws IOException, TemplateException, Exception
     {
-        Template myTemplate = cfg.getTemplate(fmTemplate);
+        // Save on lots of modification date checking by caching templates locally
+        Template myTemplate = templateCache.get(fmTemplate);
+        if (myTemplate == null)
+        {
+            myTemplate = cfg.getTemplate(fmTemplate);
+            templateCache.put(fmTemplate, myTemplate);
+        }
         ByteArrayOutputStream bos = new ByteArrayOutputStream();
         Writer out = new OutputStreamWriter(bos);
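
Two per-run caches are introduced above: site membership keyed by site name ("save hammering the repository") and parsed FreeMarker templates keyed by template path ("save on lots of modification date checking"). Both live only for one invocation of the processor. A minimal sketch of the template side, assuming the standard FreeMarker 2.3 API; the class and method names are illustrative.

    import java.io.StringWriter;
    import java.util.Map;
    import freemarker.template.Configuration;
    import freemarker.template.Template;

    public class TemplateCacheExample
    {
        public static String render(Configuration cfg, Map<String, Template> templateCache,
                String templatePath, Map<String, Object> model) throws Exception
        {
            // Look the parsed template up in the per-run cache first; only ask the
            // Configuration (and its template loader) on a cache miss
            Template template = templateCache.get(templatePath);
            if (template == null)
            {
                template = cfg.getTemplate(templatePath);
                templateCache.put(templatePath, template);
            }
            StringWriter out = new StringWriter();
            template.process(model, out);
            return out.toString();
        }
    }

A processing run would create one HashMap<String, Template> up front, pass it to every render call, and drop it when the run finishes, so cached templates never outlive a single pass.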

View File

@@ -5,6 +5,7 @@ import java.util.Date;
import java.util.Set; import java.util.Set;
import org.alfresco.model.ContentModel; import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.AuditableProperties;
import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.Node;
import org.alfresco.repo.domain.QNameDAO; import org.alfresco.repo.domain.QNameDAO;
import org.alfresco.repo.domain.Server; import org.alfresco.repo.domain.Server;
@@ -549,7 +550,9 @@ public class HibernateSessionHelperTest extends BaseSpringTest
node.setTypeQNameId(typeQNameId); node.setTypeQNameId(typeQNameId);
node.setTransaction(transaction); node.setTransaction(transaction);
node.setDeleted(false); node.setDeleted(false);
node.getAuditableProperties().setAuditValues("system", new Date(), false); AuditableProperties ap = new AuditableProperties();
node.setAuditableProperties(ap);
ap.setAuditValues("system", new Date(), false);
getSession().save(node); getSession().save(node);
return node; return node;

View File

@@ -82,7 +82,8 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
aspects = new HashSet<Long>(5); aspects = new HashSet<Long>(5);
properties = new HashMap<PropertyMapKey, NodePropertyValue>(5); properties = new HashMap<PropertyMapKey, NodePropertyValue>(5);
auditableProperties = new AuditableProperties(); // Note auditableProperties starts null, as hibernate maps a component containing nulls to null and this would
// cause a lot of dirty checks to fail!
} }
/** /**
@@ -355,6 +356,6 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
public void setAuditableProperties(AuditableProperties auditableProperties) public void setAuditableProperties(AuditableProperties auditableProperties)
{ {
this.auditableProperties = (auditableProperties == null ? new AuditableProperties() : auditableProperties); this.auditableProperties = auditableProperties;
} }
} }

View File

@@ -57,6 +57,8 @@ import org.mozilla.javascript.ImporterTopLevel;
 import org.mozilla.javascript.Scriptable;
 import org.mozilla.javascript.ScriptableObject;
 import org.mozilla.javascript.WrapFactory;
+import org.mozilla.javascript.WrappedException;
+import org.springframework.beans.factory.InitializingBean;
 import org.springframework.util.FileCopyUtils;
 /**
@@ -64,7 +66,7 @@ import org.springframework.util.FileCopyUtils;
  *
  * @author Kevin Roast
  */
-public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcessor, ScriptResourceLoader
+public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcessor, ScriptResourceLoader, InitializingBean
 {
     private static final Log logger = LogFactory.getLog(RhinoScriptProcessor.class);
@@ -82,6 +84,12 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
     /** Store root path to resolve cm:name based scripts path from */
     private String storePath;
+    /** Pre initialized secure scope object. */
+    private Scriptable secureScope;
+    /** Pre initialized non secure scope object. */
+    private Scriptable nonSecureScope;
     /**
      * Set the default store reference
      *
@@ -340,27 +348,12 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
         Context cx = Context.enter();
         try
         {
-            // The easiest way to embed Rhino is just to create a new scope this way whenever
-            // you need one. However, initStandardObjects is an expensive method to call and it
-            // allocates a fair amount of memory.
+            // Create a thread-specific scope from one of the shared scopes. See http://www.mozilla.org/rhino/scopes.html
             cx.setWrapFactory(wrapFactory);
-            Scriptable scope;
-            if (!secure)
-            {
-                scope = cx.initStandardObjects();
-                // remove security issue related objects - this ensures the script may not access
-                // unsecure java.* libraries or import any other classes for direct access - only
-                // the configured root host objects will be available to the script writer
-                scope.delete("Packages");
-                scope.delete("getClass");
-                scope.delete("java");
-            }
-            else
-            {
-                // allow access to all libraries and objects, including the importer
-                // @see http://www.mozilla.org/rhino/ScriptingJava.html
-                scope = new ImporterTopLevel(cx);
-            }
+            Scriptable sharedScope = secure ? this.nonSecureScope : this.secureScope;
+            Scriptable scope = cx.newObject(sharedScope);
+            scope.setPrototype(sharedScope);
+            scope.setParentScope(null);
             // there's always a model, if only to hold the util objects
             if (model == null)
@@ -398,6 +391,15 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
             // extract java object result if wrapped by Rhino
             return valueConverter.convertValueForRepo((Serializable)result);
         }
+        catch (WrappedException w)
+        {
+            Throwable err = w.getWrappedException();
+            if (err instanceof RuntimeException)
+            {
+                throw (RuntimeException) err;
+            }
+            throw new AlfrescoRuntimeException(err.getMessage(), err);
+        }
         catch (Throwable err)
         {
             throw new AlfrescoRuntimeException(err.getMessage(), err);
@@ -464,4 +466,50 @@ public class RhinoScriptProcessor extends BaseProcessor implements ScriptProcess
return super.wrapAsJavaObject(cx, scope, javaObject, staticType); return super.wrapAsJavaObject(cx, scope, javaObject, staticType);
} }
} }
/**
* Pre initializes two scope objects (one secure and one not) with the standard objects preinitialised. This saves
* on very expensive calls to reinitialize a new scope on every web script execution. See
* http://www.mozilla.org/rhino/scopes.html
*
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
*/
public void afterPropertiesSet() throws Exception
{
// Initialise the secure scope
Context cx = Context.enter();
try
{
cx.setWrapFactory(wrapFactory);
this.secureScope = cx.initStandardObjects();
// remove security issue related objects - this ensures the script may not access
// unsecure java.* libraries or import any other classes for direct access - only
// the configured root host objects will be available to the script writer
this.secureScope.delete("Packages");
this.secureScope.delete("getClass");
this.secureScope.delete("java");
}
finally
{
Context.exit();
}
// Initialise the non-secure scope
cx = Context.enter();
try
{
cx.setWrapFactory(wrapFactory);
// allow access to all libraries and objects, including the importer
// @see http://www.mozilla.org/rhino/ScriptingJava.html
this.nonSecureScope = new ImporterTopLevel(cx);
}
finally
{
Context.exit();
}
}
} }
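
The refactoring above pays the cost of initStandardObjects() (and of building an ImporterTopLevel) once in afterPropertiesSet(), then derives a cheap per-execution scope whose prototype is the shared scope, as described on the Rhino scopes page. A self-contained sketch of that pattern, independent of the Alfresco wrapper classes; sealing the shared scope is an extra precaution in this sketch, not something the diff itself shows.

    import org.mozilla.javascript.Context;
    import org.mozilla.javascript.Scriptable;
    import org.mozilla.javascript.ScriptableObject;

    public class SharedScopeExample
    {
        private final ScriptableObject sharedScope;

        public SharedScopeExample()
        {
            Context cx = Context.enter();
            try
            {
                // Expensive: build the standard objects once; 'true' seals them so
                // scripts cannot modify the shared state
                sharedScope = cx.initStandardObjects(null, true);
            }
            finally
            {
                Context.exit();
            }
        }

        public Object run(String script)
        {
            Context cx = Context.enter();
            try
            {
                // Cheap per-execution scope that delegates to the shared one via its prototype
                Scriptable scope = cx.newObject(sharedScope);
                scope.setPrototype(sharedScope);
                scope.setParentScope(null);
                return cx.evaluateString(scope, script, "inline", 1, null);
            }
            finally
            {
                Context.exit();
            }
        }

        public static void main(String[] args)
        {
            System.out.println(new SharedScopeExample().run("Math.pow(2, 10)"));
        }
    }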

View File

@@ -55,7 +55,6 @@ import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition; import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService; import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.lock.LockStatus; import org.alfresco.service.cmr.lock.LockStatus;
import org.alfresco.service.cmr.model.FileExistsException;
import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.model.FileNotFoundException; import org.alfresco.service.cmr.model.FileNotFoundException;
import org.alfresco.service.cmr.repository.AssociationRef; import org.alfresco.service.cmr.repository.AssociationRef;
@@ -348,10 +347,6 @@ public class ScriptNode implements Serializable, Scopeable
{ {
this.services.getFileFolderService().rename(this.nodeRef, name); this.services.getFileFolderService().rename(this.nodeRef, name);
} }
catch (FileExistsException e)
{
throw new AlfrescoRuntimeException("Failed to rename node " + nodeRef + " to " + name, e);
}
catch (FileNotFoundException e) catch (FileNotFoundException e)
{ {
throw new AlfrescoRuntimeException("Failed to rename node " + nodeRef + " to " + name, e); throw new AlfrescoRuntimeException("Failed to rename node " + nodeRef + " to " + name, e);

View File

@@ -114,6 +114,7 @@ import org.hibernate.ScrollableResults;
import org.hibernate.Session; import org.hibernate.Session;
import org.hibernate.criterion.Projections; import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions; import org.hibernate.criterion.Restrictions;
import org.hibernate.exception.ConstraintViolationException;
import org.springframework.dao.DataIntegrityViolationException; import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.orm.hibernate3.HibernateCallback; import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport; import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
@@ -765,7 +766,8 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{ {
String currentUser = getCurrentUser(); String currentUser = getCurrentUser();
Date currentDate = new Date(); Date currentDate = new Date();
AuditableProperties auditableProperties = node.getAuditableProperties(); AuditableProperties auditableProperties = new AuditableProperties();
node.setAuditableProperties(auditableProperties);
auditableProperties.setAuditValues(currentUser, currentDate, false); auditableProperties.setAuditValues(currentUser, currentDate, false);
} }
} }
@@ -779,6 +781,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
String currentUser = getCurrentUser(); String currentUser = getCurrentUser();
Date currentDate = new Date(); Date currentDate = new Date();
AuditableProperties auditableProperties = node.getAuditableProperties(); AuditableProperties auditableProperties = node.getAuditableProperties();
if (auditableProperties == null)
{
auditableProperties = new AuditableProperties();
node.setAuditableProperties(auditableProperties);
}
auditableProperties.setAuditValues(currentUser, currentDate, false); auditableProperties.setAuditValues(currentUser, currentDate, false);
} }
} }
@@ -792,6 +799,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
String currentUser = getCurrentUser(); String currentUser = getCurrentUser();
Date currentDate = new Date(); Date currentDate = new Date();
AuditableProperties auditableProperties = node.getAuditableProperties(); AuditableProperties auditableProperties = node.getAuditableProperties();
if (auditableProperties == null)
{
auditableProperties = new AuditableProperties();
node.setAuditableProperties(auditableProperties);
}
auditableProperties.setAuditValues(currentUser, currentDate, false); auditableProperties.setAuditValues(currentUser, currentDate, false);
} }
} }
@@ -810,8 +822,8 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
Node node = null; Node node = null;
if (uuid != null) if (uuid != null)
{ {
// Get any existing Node. A node with this UUID may have existed before, but must be marked // Get any existing Node. A node with this UUID may have existed before, but must be marked
// deleted; otherwise it will be considered live and valid // deleted; otherwise it will be considered live and valid
node = getNodeOrNull(store, uuid); node = getNodeOrNull(store, uuid);
} }
else else
@@ -1025,13 +1037,13 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{ {
Long nodeTypeQNameId = qnameDAO.getOrCreateQName(nodeTypeQName).getFirst(); Long nodeTypeQNameId = qnameDAO.getOrCreateQName(nodeTypeQName).getFirst();
if (!nodeTypeQNameId.equals(node.getTypeQNameId())) if (!nodeTypeQNameId.equals(node.getTypeQNameId()))
{ {
node.setTypeQNameId(nodeTypeQNameId); node.setTypeQNameId(nodeTypeQNameId);
// We will need to record the change // We will need to record the change
recordNodeUpdate(node); recordNodeUpdate(node);
}
} }
} }
}
public Serializable getNodeProperty(Long nodeId, QName propertyQName) public Serializable getNodeProperty(Long nodeId, QName propertyQName)
{ {
@@ -1044,7 +1056,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
if (hasNodeAspect(node, ContentModel.ASPECT_AUDITABLE)) if (hasNodeAspect(node, ContentModel.ASPECT_AUDITABLE))
{ {
AuditableProperties auditableProperties = node.getAuditableProperties(); AuditableProperties auditableProperties = node.getAuditableProperties();
return auditableProperties.getAuditableProperty(propertyQName); return auditableProperties == null ? null : auditableProperties.getAuditableProperty(propertyQName);
} }
else else
{ {
@@ -1082,6 +1094,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
if (hasNodeAspect(node, ContentModel.ASPECT_AUDITABLE)) if (hasNodeAspect(node, ContentModel.ASPECT_AUDITABLE))
{ {
AuditableProperties auditableProperties = node.getAuditableProperties(); AuditableProperties auditableProperties = node.getAuditableProperties();
if (auditableProperties == null)
{
auditableProperties = new AuditableProperties();
}
converted.putAll(auditableProperties.getAuditableProperties()); converted.putAll(auditableProperties.getAuditableProperties());
} }
@@ -1272,14 +1288,14 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
aspectQNames = new HashSet<QName>(aspectQNames); aspectQNames = new HashSet<QName>(aspectQNames);
// Remove sys:referenceable // Remove sys:referenceable
aspectQNames.remove(ContentModel.ASPECT_REFERENCEABLE); aspectQNames.remove(ContentModel.ASPECT_REFERENCEABLE);
// Convert // Convert
Set<Long> aspectQNameIds = qnameDAO.convertQNamesToIds(aspectQNames, true); Set<Long> aspectQNameIds = qnameDAO.convertQNamesToIds(aspectQNames, true);
// Add them // Add them
Set<Long> nodeAspects = node.getAspects(); Set<Long> nodeAspects = node.getAspects();
nodeAspects.addAll(aspectQNameIds); nodeAspects.addAll(aspectQNameIds);
// Record change ID // Record change ID
recordNodeUpdate(node); recordNodeUpdate(node);
} }
@@ -1300,7 +1316,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
// Remove them // Remove them
Set<Long> nodeAspects = node.getAspects(); Set<Long> nodeAspects = node.getAspects();
nodeAspects.removeAll(aspectQNameIds); nodeAspects.removeAll(aspectQNameIds);
// Record change ID // Record change ID
recordNodeUpdate(node); recordNodeUpdate(node);
} }
@@ -1549,16 +1565,16 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
Long parentNodeId, Long parentNodeId,
Long childNodeId, Long childNodeId,
boolean isPrimary, boolean isPrimary,
QName assocTypeQName, final QName assocTypeQName,
QName assocQName, QName assocQName,
String newName) String newName)
{ {
Node parentNode = (Node) getSession().get(NodeImpl.class, parentNodeId); final Node parentNode = (Node) getSession().get(NodeImpl.class, parentNodeId);
Node childNode = (Node) getSession().get(NodeImpl.class, childNodeId); Node childNode = (Node) getSession().get(NodeImpl.class, childNodeId);
final Pair<String, Long> childNameUnique = getChildNameUnique(assocTypeQName, newName); final Pair<String, Long> childNameUnique = getChildNameUnique(assocTypeQName, newName);
ChildAssoc assoc = new ChildAssocImpl(); final ChildAssoc assoc = new ChildAssocImpl();
assoc.setTypeQName(qnameDAO, assocTypeQName); assoc.setTypeQName(qnameDAO, assocTypeQName);
assoc.setChildNodeName(childNameUnique.getFirst()); assoc.setChildNodeName(childNameUnique.getFirst());
assoc.setChildNodeNameCrc(childNameUnique.getSecond()); assoc.setChildNodeNameCrc(childNameUnique.getSecond());
@@ -1567,27 +1583,33 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
assoc.setIndex(-1); assoc.setIndex(-1);
// maintain inverse sets // maintain inverse sets
assoc.buildAssociation(parentNode, childNode); assoc.buildAssociation(parentNode, childNode);
// persist it // Make sure that all changes to the session are persisted so that we know if any
Long assocId; // failures are from the constraint or not
try DirtySessionMethodInterceptor.flushSession(getSession(false));
Long assocId = (Long) getHibernateTemplate().execute(new HibernateCallback()
{ {
assocId = (Long) getHibernateTemplate().save(assoc); public Object doInHibernate(Session session)
}
catch (Throwable e)
{
// There is already an entity
if (isDebugEnabled)
{ {
logger.debug( try
"Duplicate child association detected: \n" + {
" Parent Node: " + parentNode.getId() + "\n" + Object result = session.save(assoc);
" Child Name Used: " + childNameUnique); DirtySessionMethodInterceptor.flushSession(session);
return result;
}
catch (ConstraintViolationException e)
{
// There is already an entity
if (isDebugEnabled)
{
logger.debug("Duplicate child association detected: \n" + " Parent Node: "
+ parentNode.getId() + "\n" + " Child Name Used: " + childNameUnique, e);
}
throw new DuplicateChildNodeNameException(parentNode.getNodeRef(), assocTypeQName, childNameUnique
.getFirst());
}
} }
throw new DuplicateChildNodeNameException( });
parentNode.getNodeRef(),
assocTypeQName,
childNameUnique.getFirst());
}
// Add it to the cache // Add it to the cache
Set<Long> oldParentAssocIds = parentAssocsCache.get(childNode.getId()); Set<Long> oldParentAssocIds = parentAssocsCache.get(childNode.getId());
if (oldParentAssocIds != null) if (oldParentAssocIds != null)
@@ -1629,7 +1651,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return new Pair<Long, ChildAssociationRef>(assocId, assoc.getChildAssocRef(qnameDAO)); return new Pair<Long, ChildAssociationRef>(assocId, assoc.getChildAssocRef(qnameDAO));
} }
public void setChildNameUnique(final Long childAssocId, String childName) public void setChildNameUnique(final Long childAssocId, final String childName)
{ {
/* /*
* Work out if there has been any change in the name * Work out if there has been any change in the name
@@ -1648,33 +1670,31 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
childAssoc.setChildNodeName(childNameUnique.getFirst()); childAssoc.setChildNodeName(childNameUnique.getFirst());
childAssoc.setChildNodeNameCrc(childNameUnique.getSecond().longValue()); childAssoc.setChildNodeNameCrc(childNameUnique.getSecond().longValue());
// Flush again to force a DB constraint here // Flush again to force a DB constraint here
DirtySessionMethodInterceptor.flushSession(session, true); try
// Done {
return null; DirtySessionMethodInterceptor.flushSession(session, true);
// Done
return null;
}
catch (ConstraintViolationException e)
{
// There is already an entity
if (isDebugEnabled)
{
logger.debug("Duplicate child association detected: \n" + " Parent Node: "
+ parentNode.getId() + "\n" + " Child Name Used: " + childNameUnique, e);
}
throw new DuplicateChildNodeNameException(parentNode.getNodeRef(), childAssoc
.getTypeQName(qnameDAO), childNameUnique.getFirst());
}
} }
}; };
// Make sure that all changes to the session are persisted so that we know if any // Make sure that all changes to the session are persisted so that we know if any
// failures are from the constraint or not // failures are from the constraint or not
DirtySessionMethodInterceptor.flushSession(getSession(false)); DirtySessionMethodInterceptor.flushSession(getSession(false));
try getHibernateTemplate().execute(callback);
{
getHibernateTemplate().execute(callback);
}
catch (Throwable e)
{
// There is already an entity
if (isDebugEnabled)
{
logger.debug(
"Duplicate child association detected: \n" +
" Parent Node: " + parentNode.getId() + "\n" +
" Child Name Used: " + childNameUnique);
}
throw new DuplicateChildNodeNameException(
parentNode.getNodeRef(),
childAssoc.getTypeQName(qnameDAO),
childNameUnique.getFirst());
}
// Done // Done
if (isDebugEnabled) if (isDebugEnabled)
@@ -1685,7 +1705,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
" Child Assoc: " + childAssoc); " Child Assoc: " + childAssoc);
} }
} }
/** /**
* Apply the <b>cm:name</b> to the child association. If the child name is <tt>null</tt> then * Apply the <b>cm:name</b> to the child association. If the child name is <tt>null</tt> then
* a GUID is generated as a substitute. * a GUID is generated as a substitute.
@@ -2973,12 +2993,12 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{ {
Query query = session Query query = session
.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_USERS_WITHOUT_USAGE) .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_USERS_WITHOUT_USAGE)
.setString("storeProtocol", storeRef.getProtocol()) .setString("storeProtocol", storeRef.getProtocol())
.setString("storeIdentifier", storeRef.getIdentifier()) .setString("storeIdentifier", storeRef.getIdentifier())
.setParameter("usernamePropQNameID", usernamePropQNamePair.getFirst()) // cm:username .setParameter("usernamePropQNameID", usernamePropQNamePair.getFirst()) // cm:username
.setParameter("sizeCurrentPropQNameID", sizeCurrentPropQNamePair.getFirst()) // cm:sizeCurrent .setParameter("sizeCurrentPropQNameID", sizeCurrentPropQNamePair.getFirst()) // cm:sizeCurrent
.setParameter("personTypeQNameID", personTypeQNamePair.getFirst()) // cm:person .setParameter("personTypeQNameID", personTypeQNamePair.getFirst()) // cm:person
; ;
DirtySessionMethodInterceptor.setQueryFlushMode(session, query); DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
return query.scroll(ScrollMode.FORWARD_ONLY); return query.scroll(ScrollMode.FORWARD_ONLY);
} }
@@ -3009,7 +3029,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
// Done // Done
} }
public void getUsersWithUsage( public void getUsersWithUsage(
final StoreRef storeRef, final StoreRef storeRef,
final ObjectArrayQueryCallback resultsCallback) final ObjectArrayQueryCallback resultsCallback)
@@ -3094,7 +3114,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
Map<PropertyMapKey, NodePropertyValue> properties = node.getProperties(); Map<PropertyMapKey, NodePropertyValue> properties = node.getProperties();
for (Map.Entry<PropertyMapKey, NodePropertyValue> entry : properties.entrySet()) for (Map.Entry<PropertyMapKey, NodePropertyValue> entry : properties.entrySet())
{ {
PropertyMapKey propertyKey = entry.getKey(); PropertyMapKey propertyKey = entry.getKey();
Long propertyQNameId = propertyKey.getQnameId(); Long propertyQNameId = propertyKey.getQnameId();
QName propertyQName = qnameDAO.getQName(propertyQNameId).getSecond(); QName propertyQName = qnameDAO.getQName(propertyQNameId).getSecond();
NodePropertyValue propertyValue = entry.getValue(); NodePropertyValue propertyValue = entry.getValue();
@@ -3190,7 +3210,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
} }
// Done // Done
} }
/* /*
* Queries for transactions * Queries for transactions
*/ */
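
The child-association changes above replace a blanket catch (Throwable) around the save with an explicit session flush followed by a catch of Hibernate's ConstraintViolationException inside the callback, so only a genuine key violation is translated into DuplicateChildNodeNameException and everything else propagates. A reduced sketch of that flush-and-translate pattern against the Spring 2.x HibernateTemplate API; the entity and the exception thrown at the end are illustrative.

    import java.io.Serializable;
    import java.sql.SQLException;
    import org.hibernate.HibernateException;
    import org.hibernate.Session;
    import org.hibernate.exception.ConstraintViolationException;
    import org.springframework.orm.hibernate3.HibernateCallback;
    import org.springframework.orm.hibernate3.HibernateTemplate;

    public class ConstraintTranslationExample
    {
        /**
         * Saves an entity and flushes immediately so that a unique or foreign key violation
         * surfaces here, at a known point, rather than at some later implicit flush.
         */
        public static Serializable saveWithDuplicateCheck(HibernateTemplate hibernateTemplate, final Object entity)
        {
            return (Serializable) hibernateTemplate.execute(new HibernateCallback()
            {
                public Object doInHibernate(Session session) throws HibernateException, SQLException
                {
                    try
                    {
                        Serializable id = session.save(entity);
                        session.flush();                 // force the INSERT now
                        return id;
                    }
                    catch (ConstraintViolationException e)
                    {
                        // Only a real constraint breach lands here; anything else propagates untouched
                        throw new IllegalStateException("Duplicate entity detected", e);
                    }
                }
            });
        }
    }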

View File

@@ -113,6 +113,10 @@ public class ScriptServiceImpl implements ScriptService
ScriptProcessor scriptProcessor = getScriptProcessor(scriptClasspath); ScriptProcessor scriptProcessor = getScriptProcessor(scriptClasspath);
return scriptProcessor.execute(scriptClasspath, model); return scriptProcessor.execute(scriptClasspath, model);
} }
catch (ScriptException err)
{
throw err;
}
catch (Throwable err) catch (Throwable err)
{ {
throw new ScriptException("Failed to execute script '" + scriptClasspath + "': " + err.getMessage(), err); throw new ScriptException("Failed to execute script '" + scriptClasspath + "': " + err.getMessage(), err);
@@ -140,6 +144,10 @@ public class ScriptServiceImpl implements ScriptService
ScriptProcessor scriptProcessor = lookupScriptProcessor(engine); ScriptProcessor scriptProcessor = lookupScriptProcessor(engine);
return scriptProcessor.execute(scriptClasspath, model); return scriptProcessor.execute(scriptClasspath, model);
} }
catch (ScriptException err)
{
throw err;
}
catch (Throwable err) catch (Throwable err)
{ {
throw new ScriptException("Failed to execute script '" + scriptClasspath + "': " + err.getMessage(), err); throw new ScriptException("Failed to execute script '" + scriptClasspath + "': " + err.getMessage(), err);
@@ -167,6 +175,10 @@ public class ScriptServiceImpl implements ScriptService
ScriptProcessor scriptProcessor = getScriptProcessor(scriptRef); ScriptProcessor scriptProcessor = getScriptProcessor(scriptRef);
return scriptProcessor.execute(scriptRef, contentProp, model); return scriptProcessor.execute(scriptRef, contentProp, model);
} }
catch (ScriptException err)
{
throw err;
}
catch (Throwable err) catch (Throwable err)
{ {
throw new ScriptException("Failed to execute script '" + scriptRef.toString() + "': " + err.getMessage(), err); throw new ScriptException("Failed to execute script '" + scriptRef.toString() + "': " + err.getMessage(), err);
@@ -194,6 +206,10 @@ public class ScriptServiceImpl implements ScriptService
ScriptProcessor scriptProcessor = lookupScriptProcessor(engine); ScriptProcessor scriptProcessor = lookupScriptProcessor(engine);
return scriptProcessor.execute(scriptRef, contentProp, model); return scriptProcessor.execute(scriptRef, contentProp, model);
} }
catch (ScriptException err)
{
throw err;
}
catch (Throwable err) catch (Throwable err)
{ {
throw new ScriptException("Failed to execute script '" + scriptRef.toString() + "': " + err.getMessage(), err); throw new ScriptException("Failed to execute script '" + scriptRef.toString() + "': " + err.getMessage(), err);
@@ -218,6 +234,10 @@ public class ScriptServiceImpl implements ScriptService
ScriptProcessor scriptProcessor = getScriptProcessor(location.toString()); ScriptProcessor scriptProcessor = getScriptProcessor(location.toString());
return scriptProcessor.execute(location, model); return scriptProcessor.execute(location, model);
} }
catch (ScriptException err)
{
throw err;
}
catch (Throwable err) catch (Throwable err)
{ {
throw new ScriptException("Failed to execute script '" + location.toString() + "': " + err.getMessage(), err); throw new ScriptException("Failed to execute script '" + location.toString() + "': " + err.getMessage(), err);
@@ -242,6 +262,10 @@ public class ScriptServiceImpl implements ScriptService
ScriptProcessor scriptProcessor = lookupScriptProcessor(engine); ScriptProcessor scriptProcessor = lookupScriptProcessor(engine);
return scriptProcessor.execute(location, model); return scriptProcessor.execute(location, model);
} }
catch (ScriptException err)
{
throw err;
}
catch (Throwable err) catch (Throwable err)
{ {
throw new ScriptException("Failed to execute script '" + location.toString() + "': " + err.getMessage(), err); throw new ScriptException("Failed to execute script '" + location.toString() + "': " + err.getMessage(), err);
@@ -278,6 +302,10 @@ public class ScriptServiceImpl implements ScriptService
ScriptProcessor scriptProcessor = lookupScriptProcessor(engine); ScriptProcessor scriptProcessor = lookupScriptProcessor(engine);
return scriptProcessor.executeString(script, model); return scriptProcessor.executeString(script, model);
} }
catch (ScriptException err)
{
throw err;
}
catch (Throwable err) catch (Throwable err)
{ {
throw new ScriptException("Failed to execute supplied script: " + err.getMessage(), err); throw new ScriptException("Failed to execute supplied script: " + err.getMessage(), err);

View File

@@ -69,7 +69,7 @@ public class Site implements Serializable
private ScriptableQNameMap<String, CustomProperty> customProperties = null; private ScriptableQNameMap<String, CustomProperty> customProperties = null;
/** Services Registry */ /** Services Registry */
private ServiceRegistry services; private ServiceRegistry serviceRegistry;
/** Site service */ /** Site service */
private SiteService siteService; private SiteService siteService;
@@ -87,7 +87,7 @@ public class Site implements Serializable
*/ */
/*package*/ Site(SiteInfo siteInfo, ServiceRegistry serviceRegistry, SiteService siteService, Scriptable scope) /*package*/ Site(SiteInfo siteInfo, ServiceRegistry serviceRegistry, SiteService siteService, Scriptable scope)
{ {
this.services = serviceRegistry; this.serviceRegistry = serviceRegistry;
this.siteService = siteService; this.siteService = siteService;
this.siteInfo = siteInfo; this.siteInfo = siteInfo;
this.scope = scope; this.scope = scope;
@@ -186,7 +186,7 @@ public class Site implements Serializable
ScriptNode node = null; ScriptNode node = null;
if (this.siteInfo.getNodeRef() != null) if (this.siteInfo.getNodeRef() != null)
{ {
node = new ScriptNode(this.siteInfo.getNodeRef(), this.services, this.scope); node = new ScriptNode(this.siteInfo.getNodeRef(), this.serviceRegistry, this.scope);
} }
return node; return node;
@@ -336,19 +336,11 @@ public class Site implements Serializable
public ScriptNode getContainer(String componentId) public ScriptNode getContainer(String componentId)
{ {
ScriptNode container = null; ScriptNode container = null;
try NodeRef containerNodeRef = this.siteService.getContainer(getShortName(), componentId);
if (containerNodeRef != null)
{ {
NodeRef containerNodeRef = this.siteService.getContainer(getShortName(), componentId); container = new ScriptNode(containerNodeRef, this.serviceRegistry, this.scope);
if (containerNodeRef != null)
{
container = new ScriptNode(containerNodeRef, this.services, this.scope);
}
} }
catch(AlfrescoRuntimeException e)
{
// NOTE: not good practice to catch all, but in general we're not throwing exceptions
// into the script layer
}
return container; return container;
} }
@@ -385,56 +377,48 @@ public class Site implements Serializable
public ScriptNode createContainer(final String componentId, final String folderType, final Object permissions) public ScriptNode createContainer(final String componentId, final String folderType, final Object permissions)
{ {
ScriptNode container = null; ScriptNode container = null;
try NodeRef containerNodeRef = AuthenticationUtil.runAs(new RunAsWork<NodeRef>()
{ {
NodeRef containerNodeRef = AuthenticationUtil.runAs(new RunAsWork<NodeRef>() public NodeRef doWork() throws Exception
{ {
public NodeRef doWork() throws Exception // Get the container type
QName folderQName = (folderType == null) ? null : QName.createQName(folderType, serviceRegistry.getNamespaceService());
// Create the container node
NodeRef containerNodeRef = Site.this.siteService.createContainer(getShortName(), componentId, folderQName, null);
// Set any permissions that might have been provided for the container
if (permissions != null && permissions instanceof ScriptableObject)
{ {
// Get the container type ScriptableObject scriptable = (ScriptableObject)permissions;
QName folderQName = (folderType == null) ? null : QName.createQName(folderType, services.getNamespaceService()); Object[] propIds = scriptable.getIds();
for (int i = 0; i < propIds.length; i++)
// Create the container node
NodeRef containerNodeRef = Site.this.siteService.createContainer(getShortName(), componentId, folderQName, null);
// Set any permissions that might have been provided for the container
if (permissions != null && permissions instanceof ScriptableObject)
{ {
ScriptableObject scriptable = (ScriptableObject)permissions; // work on each key in turn
Object[] propIds = scriptable.getIds(); Object propId = propIds[i];
for (int i = 0; i < propIds.length; i++)
// we are only interested in keys that are formed of Strings
if (propId instanceof String)
{ {
// work on each key in turn // get the value out for the specified key - it must be String
Object propId = propIds[i]; final String key = (String)propId;
final Object value = scriptable.get(key, scriptable);
// we are only interested in keys that are formed of Strings if (value instanceof String)
if (propId instanceof String) {
{ // Set the permission on the container
// get the value out for the specified key - it must be String Site.this.serviceRegistry.getPermissionService().setPermission(containerNodeRef, key, (String)value, true);
final String key = (String)propId;
final Object value = scriptable.get(key, scriptable);
if (value instanceof String)
{
// Set the permission on the container
Site.this.services.getPermissionService().setPermission(containerNodeRef, key, (String)value, true);
}
} }
} }
} }
}
return containerNodeRef;
} return containerNodeRef;
}, AuthenticationUtil.SYSTEM_USER_NAME); }
}, AuthenticationUtil.SYSTEM_USER_NAME);
// Create the script node for the container
container = new ScriptNode(containerNodeRef, this.services, this.scope); // Create the script node for the container
} container = new ScriptNode(containerNodeRef, this.serviceRegistry, this.scope);
catch(AlfrescoRuntimeException e) return container;
{
// NOTE: not good practice to catch all, but in general we're not throwing exceptions
// into the script layer
}
return container;
} }
/** /**
@@ -445,17 +429,7 @@ public class Site implements Serializable
*/ */
public boolean hasContainer(String componentId) public boolean hasContainer(String componentId)
{ {
boolean hasContainer = false; return this.siteService.hasContainer(getShortName(), componentId);
try
{
hasContainer = this.siteService.hasContainer(getShortName(), componentId);
}
catch(AlfrescoRuntimeException e)
{
// NOTE: not good practice to catch all, but in general we're not throwing exceptions
// into the script layer
}
return hasContainer;
} }
/** /**
@@ -470,7 +444,7 @@ public class Site implements Serializable
if (permissions != null && permissions instanceof ScriptableObject) if (permissions != null && permissions instanceof ScriptableObject)
{ {
// Get the permission service // Get the permission service
final PermissionService permissionService = this.services.getPermissionService(); final PermissionService permissionService = this.serviceRegistry.getPermissionService();
if (!permissionService.getInheritParentPermissions(nodeRef)) if (!permissionService.getInheritParentPermissions(nodeRef))
{ {
@@ -525,7 +499,7 @@ public class Site implements Serializable
{ {
final NodeRef nodeRef = node.getNodeRef(); final NodeRef nodeRef = node.getNodeRef();
PermissionService permissionService = services.getPermissionService(); PermissionService permissionService = serviceRegistry.getPermissionService();
try try
{ {
// Ensure node isn't inheriting permissions from an ancestor before deleting // Ensure node isn't inheriting permissions from an ancestor before deleting
@@ -562,8 +536,8 @@ public class Site implements Serializable
if (this.customProperties == null) if (this.customProperties == null)
{ {
// create the custom properties map // create the custom properties map
ScriptNode siteNode = new ScriptNode(this.siteInfo.getNodeRef(), this.services); ScriptNode siteNode = new ScriptNode(this.siteInfo.getNodeRef(), this.serviceRegistry);
this.customProperties = new ContentAwareScriptableQNameMap<String, CustomProperty>(siteNode, this.services); this.customProperties = new ContentAwareScriptableQNameMap<String, CustomProperty>(siteNode, this.serviceRegistry);
Map<QName, Serializable> props = siteInfo.getCustomProperties(); Map<QName, Serializable> props = siteInfo.getCustomProperties();
for (QName qname : props.keySet()) for (QName qname : props.keySet())
@@ -578,7 +552,7 @@ public class Site implements Serializable
// get the type and label information from the dictionary // get the type and label information from the dictionary
String title = null; String title = null;
String type = null; String type = null;
PropertyDefinition propDef = this.services.getDictionaryService().getProperty(qname); PropertyDefinition propDef = this.serviceRegistry.getDictionaryService().getProperty(qname);
if (propDef != null) if (propDef != null)
{ {
type = propDef.getDataType().getName().toString(); type = propDef.getDataType().getName().toString();
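
The Site script object changes above stop swallowing AlfrescoRuntimeException in the container helpers (getContainer, createContainer, hasContainer), rename the services field to serviceRegistry, and keep container creation running as the system user so that ordinary site members are not blocked by permission checks. A compact sketch of that run-as pattern, assuming the AuthenticationUtil and SiteService calls visible in the diff; the helper class itself is illustrative.

    import org.alfresco.repo.security.authentication.AuthenticationUtil;
    import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
    import org.alfresco.service.cmr.repository.NodeRef;
    import org.alfresco.service.cmr.site.SiteService;

    public class RunAsExample
    {
        /** Creates a site container with system privileges, regardless of the calling user. */
        public static NodeRef createContainerAsSystem(final SiteService siteService,
                final String shortName, final String componentId)
        {
            return AuthenticationUtil.runAs(new RunAsWork<NodeRef>()
            {
                public NodeRef doWork() throws Exception
                {
                    // Executed with the system identity; exceptions propagate to the caller
                    // instead of being silently swallowed
                    return siteService.createContainer(shortName, componentId, null, null);
                }
            }, AuthenticationUtil.SYSTEM_USER_NAME);
        }
    }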