Merged V1.4 to HEAD

svn merge svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@3987 svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4133 .
   Removed LicenseComponent reference from projects\repository\source\java\org\alfresco\repo\descriptor\DescriptorServiceImpl.java


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@4135 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley
2006-10-17 22:42:59 +00:00
parent 4f1682e8d0
commit be167f60cf
106 changed files with 5379 additions and 2646 deletions

View File

@@ -162,7 +162,7 @@ public class CopyActionExecuter extends ActionExecuterAbstractBase
else
{
// Create a new copy of the node
this.copyService.copy(
this.copyService.copyAndRename(
actionedUponNodeRef,
destinationParent,
destinationAssocTypeQName,

View File

@@ -22,6 +22,7 @@ import java.util.Collections;
import java.util.List;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.node.integrity.IntegrityChecker;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
@@ -329,6 +330,9 @@ public abstract class AbstractPatch implements Patch
{
public String doWork() throws Exception
{
// downgrade integrity checking
IntegrityChecker.setWarnInTransaction();
String report = applyInternal();
// done
return report;
@@ -389,7 +393,8 @@ public abstract class AbstractPatch implements Patch
/**
* This method does the work. All transactions and thread-safety will be taken care of by this class.
* Any exception will result in the transaction being rolled back.
* Any exception will result in the transaction being rolled back. Integrity checks are downgraded
* for the duration of the transaction.
*
* @return Returns the report (only success messages).
* @see #apply()
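To illustrate the contract described above, a concrete patch normally overrides only applyInternal(); transaction demarcation and the integrity-check downgrade are inherited from AbstractPatch. The sketch below is hypothetical (the class name and message key are invented for illustration):
package org.alfresco.repo.admin.patch.impl;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.repo.admin.patch.AbstractPatch;
/**
 * Hypothetical example patch: AbstractPatch wraps this call in a transaction and
 * downgrades integrity checking, so the subclass only performs the changes.
 */
public class ExamplePatch extends AbstractPatch
{
    // invented message key, for illustration only
    private static final String MSG_SUCCESS = "patch.example.result";
    @Override
    protected String applyInternal() throws Exception
    {
        int updateCount = 0;
        // ... perform the repository changes and count them here ...
        return I18NUtil.getMessage(MSG_SUCCESS, updateCount);
    }
}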

View File

@@ -21,11 +21,10 @@ import java.util.List;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.util.AbstractLifecycleBean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
/**
* This component is responsible for ensuring that patches are applied
@@ -33,7 +32,7 @@ import org.springframework.context.event.ContextRefreshedEvent;
*
* @author Derek Hulley
*/
public class PatchExecuter implements ApplicationListener
public class PatchExecuter extends AbstractLifecycleBean
{
private static final String MSG_CHECKING = "patch.executer.checking";
private static final String MSG_NO_PATCHES_REQUIRED = "patch.executer.no_patches_required";
@@ -101,16 +100,16 @@ public class PatchExecuter implements ApplicationListener
}
}
/*
* (non-Javadoc)
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
*/
public void onApplicationEvent(ApplicationEvent event)
@Override
protected void onBootstrap(ApplicationEvent event)
{
if (event instanceof ContextRefreshedEvent)
{
applyOutstandingPatches();
}
applyOutstandingPatches();
}
@Override
protected void onShutdown(ApplicationEvent event)
{
// NOOP
}
}
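PatchExecuter, DescriptorServiceImpl, DescriptorStartupLog and SchemaBootstrap all switch in this change set from implementing ApplicationListener directly to extending org.alfresco.util.AbstractLifecycleBean. The base class itself is not part of this diff; the following is only a sketch of the contract it is assumed to provide, inferred from how onBootstrap(), onShutdown() and getApplicationContext() are used in these files:
package org.alfresco.util;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextClosedEvent;
import org.springframework.context.event.ContextRefreshedEvent;
/**
 * Assumed shape of the lifecycle base class: it listens for the raw Spring
 * context events and routes them to the two template methods that the
 * subclasses in this commit override.
 */
public abstract class AbstractLifecycleBean implements ApplicationListener, ApplicationContextAware
{
    private ApplicationContext applicationContext;
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
    {
        this.applicationContext = applicationContext;
    }
    protected ApplicationContext getApplicationContext()
    {
        return applicationContext;
    }
    public void onApplicationEvent(ApplicationEvent event)
    {
        // route the raw context events to the bootstrap/shutdown template methods
        if (event instanceof ContextRefreshedEvent)
        {
            onBootstrap(event);
        }
        else if (event instanceof ContextClosedEvent)
        {
            onShutdown(event);
        }
    }
    /** Called once the application context has been refreshed. */
    protected abstract void onBootstrap(ApplicationEvent event);
    /** Called when the application context is shutting down. */
    protected abstract void onShutdown(ApplicationEvent event);
}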

View File

@@ -0,0 +1,169 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.admin.patch.impl;
import java.util.List;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.domain.DbAccessControlEntry;
import org.alfresco.repo.domain.DbPermission;
import org.alfresco.repo.domain.hibernate.DbPermissionImpl;
import org.alfresco.service.namespace.QName;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
* Provides common functionality to change a permission type and/or name.
*
* @author Derek Hulley
*/
public abstract class AbstractPermissionChangePatch extends AbstractPatch
{
private HibernateHelper helper;
public AbstractPermissionChangePatch()
{
helper = new HibernateHelper();
}
public void setSessionFactory(SessionFactory sessionFactory)
{
this.helper.setSessionFactory(sessionFactory);
}
/**
* Helper method to rename (move) a permission. This involves checking for the existence of the
* new permission and then moving all the entries to point to the new permission.
*
* @param oldTypeQName the old permission type
* @param oldName the old permission name
* @param newTypeQName the new permission type
* @param newName the new permission name
* @return Returns the number of permission entries modified
*/
protected int renamePermission(QName oldTypeQName, String oldName, QName newTypeQName, String newName)
{
return helper.createAndUpdatePermission(oldTypeQName, oldName, newTypeQName, newName);
}
/** Helper to get a permission entity */
private static class GetPermissionCallback implements HibernateCallback
{
private QName typeQName;
private String name;
public GetPermissionCallback(QName typeQName, String name)
{
this.typeQName = typeQName;
this.name = name;
}
public Object doInHibernate(Session session)
{
// flush any outstanding entities
session.flush();
Query query = session.getNamedQuery(HibernateHelper.QUERY_GET_PERMISSION);
query.setParameter("permissionTypeQName", typeQName)
.setString("permissionName", name);
return query.uniqueResult();
}
}
private static class HibernateHelper extends HibernateDaoSupport
{
private static final String QUERY_GET_PERMISSION = "permission.GetPermission";
private static final String QUERY_GET_ENTRIES_TO_CHANGE = "permission.patch.GetAccessControlEntriesToChangePermissionOn";
public int createAndUpdatePermission(
final QName oldTypeQName,
final String oldName,
final QName newTypeQName,
final String newName)
{
if (oldTypeQName.equals(newTypeQName) && oldName.equals(newName))
{
throw new IllegalArgumentException("Cannot move permission to itself: " + oldTypeQName + "-" + oldName);
}
HibernateCallback getNewPermissionCallback = new GetPermissionCallback(newTypeQName, newName);
DbPermission permission = (DbPermission) getHibernateTemplate().execute(getNewPermissionCallback);
if (permission == null)
{
// create the permission
permission = new DbPermissionImpl();
permission.setTypeQname(newTypeQName);
permission.setName(newName);
// save
getHibernateTemplate().save(permission);
}
final DbPermission newPermission = permission;
// now update all entries that refer to the old permission
HibernateCallback updateEntriesCallback = new HibernateCallback()
{
private static final int MAX_RESULTS = 1000;
@SuppressWarnings("unchecked")
public Object doInHibernate(Session session)
{
int count = 0;
while (true)
{
// flush any outstanding entities
session.flush();
Query query = session.getNamedQuery(HibernateHelper.QUERY_GET_ENTRIES_TO_CHANGE);
query.setParameter("oldTypeQName", oldTypeQName)
.setParameter("oldName", oldName)
.setMaxResults(MAX_RESULTS);
List<DbAccessControlEntry> entries = (List<DbAccessControlEntry>) query.list();
// if there are no results, then we're done
if (entries.size() == 0)
{
break;
}
for (DbAccessControlEntry entry : entries)
{
entry.setPermission(newPermission);
count++;
session.evict(entry);
}
// flush and evict all the entries
session.flush();
for (DbAccessControlEntry entry : entries)
{
session.evict(entry);
}
// next set of results
}
// done
return count;
}
};
int updateCount = (Integer) getHibernateTemplate().execute(updateEntriesCallback);
// now delete the old permission
HibernateCallback getOldPermissionCallback = new GetPermissionCallback(oldTypeQName, oldName);
DbPermission oldPermission = (DbPermission) getHibernateTemplate().execute(getOldPermissionCallback);
if (oldPermission != null)
{
getHibernateTemplate().delete(oldPermission);
}
// done
return updateCount;
}
}
}

View File

@@ -54,6 +54,10 @@ public class ActionRuleDecouplingPatch extends AbstractPatch
for (NodeRef origRuleNodeRef : resultSet.getNodeRefs())
{
// Check that this rule needs to be updated
if (!this.nodeService.exists(origRuleNodeRef))
{
continue;
}
Map<QName, Serializable> origProperties = this.nodeService.getProperties(origRuleNodeRef);
if (origProperties.containsKey(RuleModel.PROP_EXECUTE_ASYNC) == false)
{
@@ -79,21 +83,21 @@ public class ActionRuleDecouplingPatch extends AbstractPatch
Map<QName, Serializable> newProperties = this.nodeService.getProperties(newRuleNodeRef);
// Set the rule type, execute async and applyToChildren properties on the rule
String ruleType = (String)origProperties.get(RuleModel.PROP_RULE_TYPE);
Serializable ruleType = origProperties.get(RuleModel.PROP_RULE_TYPE);
origProperties.remove(RuleModel.PROP_RULE_TYPE);
newProperties.put(RuleModel.PROP_RULE_TYPE, ruleType);
Boolean executeAsync = (Boolean)origProperties.get(ActionModel.PROP_EXECUTE_ASYNCHRONOUSLY);
Serializable executeAsync = origProperties.get(ActionModel.PROP_EXECUTE_ASYNCHRONOUSLY);
origProperties.remove(ActionModel.PROP_EXECUTE_ASYNCHRONOUSLY);
newProperties.put(RuleModel.PROP_EXECUTE_ASYNC, executeAsync);
Boolean applyToChildren = (Boolean)origProperties.get(RuleModel.PROP_APPLY_TO_CHILDREN);
Serializable applyToChildren = origProperties.get(RuleModel.PROP_APPLY_TO_CHILDREN);
origProperties.remove(RuleModel.PROP_APPLY_TO_CHILDREN);
newProperties.put(RuleModel.PROP_APPLY_TO_CHILDREN, applyToChildren);
origProperties.remove(QName.createQName(RuleModel.RULE_MODEL_URI, "owningNodeRef"));
// Move the action and description values from the composite action onto the rule
String title = (String)origProperties.get(ActionModel.PROP_ACTION_TITLE);
Serializable title = origProperties.get(ActionModel.PROP_ACTION_TITLE);
origProperties.remove(ActionModel.PROP_ACTION_TITLE);
String description = (String)origProperties.get(ActionModel.PROP_ACTION_DESCRIPTION);
Serializable description = origProperties.get(ActionModel.PROP_ACTION_DESCRIPTION);
origProperties.remove(ActionModel.PROP_ACTION_DESCRIPTION);
newProperties.put(ContentModel.PROP_TITLE, title);
newProperties.put(ContentModel.PROP_DESCRIPTION, description);

View File

@@ -16,33 +16,40 @@
*/
package org.alfresco.repo.admin.patch.impl;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.service.cmr.admin.PatchException;
import org.hibernate.SessionFactory;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
/**
* Roles defined in permissionsDefinition.xml moved from <b>cm:content</b> to <b>sys:base</b>.
* This affects the data stored in the <b>node_perm_entry</b> table.
* <p>
* <b>WILL NOT EXECUTE ANYMORE</b>
* This affects the data stored in the <b>permission</b> table.
*
* @author Derek Hulley
*/
public class ContentPermissionPatch extends AbstractPatch
public class ContentPermissionPatch extends AbstractPermissionChangePatch
{
private static final String MSG_UPGRADE = "patch.contentPermission.upgrade";
public ContentPermissionPatch()
{
}
public void setSessionFactory(SessionFactory sessionFactory)
{
}
private static final String MSG_SUCCESS = "patch.contentPermission.result";
private static final QName TYPE_QNAME_OLD = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "content");
private static final QName TYPE_QNAME_NEW = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "base");
private static final String[] NAMES = new String[] {"Execute", "ReadContent", "WriteContent", "ExecuteContent"};
@Override
protected String applyInternal() throws Exception
{
throw new PatchException(MSG_UPGRADE);
int updateCount = 0;
for (String permissionName : NAMES)
{
updateCount += super.renamePermission(
ContentPermissionPatch.TYPE_QNAME_OLD,
permissionName,
ContentPermissionPatch.TYPE_QNAME_NEW,
permissionName);
}
// build the result message
String msg = I18NUtil.getMessage(MSG_SUCCESS, updateCount);
// done
return msg;
}
}

View File

@@ -16,35 +16,42 @@
*/
package org.alfresco.repo.admin.patch.impl;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.service.cmr.admin.PatchException;
import org.hibernate.SessionFactory;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
/**
* The roles defined in permissionsDefinition.xml moved from <b>cm:folder</b> to <b>cm:cmobject</b>.
* This affects the data stored in the <b>node_perm_entry</b> table.
* This affects the data stored in the <b>permission</b> table.
* <p>
* JIRA: {@link http://www.alfresco.org/jira/browse/AR-344 AR-344}
* <p>
* <b>WILL NOT EXECUTE ANYMORE</b>
*
* @author Derek Hulley
*/
public class PermissionDataPatch extends AbstractPatch
public class PermissionDataPatch extends AbstractPermissionChangePatch
{
private static final String MSG_UPGRADE = "patch.updatePermissionData.upgrade";
public PermissionDataPatch()
{
}
public void setSessionFactory(SessionFactory sessionFactory)
{
}
private static final String MSG_SUCCESS = "patch.updatePermissionData.result";
private static final QName TYPE_QNAME_OLD = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "folder");
private static final QName TYPE_QNAME_NEW = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "cmobject");
private static final String[] NAMES = new String[] {"Coordinator", "Contributor", "Editor", "Guest"};
@Override
protected String applyInternal() throws Exception
{
throw new PatchException(MSG_UPGRADE);
int updateCount = 0;
for (String permissionName : NAMES)
{
updateCount += super.renamePermission(
PermissionDataPatch.TYPE_QNAME_OLD,
permissionName,
PermissionDataPatch.TYPE_QNAME_NEW,
permissionName);
}
// build the result message
String msg = I18NUtil.getMessage(MSG_SUCCESS, updateCount);
// done
return msg;
}
}

View File

@@ -16,32 +16,27 @@
*/
package org.alfresco.repo.admin.patch.impl;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.service.cmr.admin.PatchException;
import org.hibernate.SessionFactory;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.model.ContentModel;
/**
* The permission 'Guest' has been renamed to 'Consumer'.
* <p>
* <b>WILL NOT EXECUTE ANYMORE</b>
*
* @author David Caruana
* @author Derek Hulley
*/
public class UpdateGuestPermissionPatch extends AbstractPatch
public class UpdateGuestPermissionPatch extends AbstractPermissionChangePatch
{
private static final String MSG_UPGRADE = "patch.updateGuestPermission.upgrade";
public UpdateGuestPermissionPatch()
{
}
public void setSessionFactory(SessionFactory sessionFactory)
{
}
private static final String MSG_SUCCESS = "patch.updateGuestPermission.result";
@Override
protected String applyInternal() throws Exception
{
throw new PatchException(MSG_UPGRADE);
int updateCount = super.renamePermission(ContentModel.TYPE_CMOBJECT, "Guest", ContentModel.TYPE_CMOBJECT, "Consumer");
// build the result message
String msg = I18NUtil.getMessage(MSG_SUCCESS, updateCount);
// done
return msg;
}
}

View File

@@ -0,0 +1,573 @@
/**
*
*/
package org.alfresco.repo.admin.patch.util;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.util.HashMap;
import java.util.Map;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionModel;
import org.alfresco.repo.rule.RuleModel;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.util.GUID;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;
import org.xml.sax.helpers.AttributesImpl;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlPullParserFactory;
/**
* Updates an XML import file to be compatible with the current version of the repository.
*
* @author royw
*/
public class ImportFileUpdater
{
/** Indent size **/
private static int INDENT_SIZE = 2;
/** The destination export version number **/
private static String EXPORT_VERSION = "1.4.0";
/** Element names **/
private static String NAME_EXPORTER_VERSION = "exporterVersion";
private static String NAME_RULE = "rule";
/** The current import version number **/
private String version;
/**
* Updates the passed import file into the equivalent 1.4 format.
*
* @param source the source import file
* @param destination the destination import file
*/
public void updateImportFile(String source, String destination)
{
XmlPullParser reader = getReader(source);
XMLWriter writer = getWriter(destination);
try
{
// Start the document
writer.startDocument();
// Start reading the document
int eventType = reader.getEventType();
while (eventType != XmlPullParser.END_DOCUMENT)
{
if (eventType == XmlPullParser.START_TAG)
{
ImportFileUpdater.this.outputCurrentElement(reader, writer, new OutputChildren());
}
eventType = reader.next();
}
// End and close the document
writer.endDocument();
writer.close();
}
catch (Exception exception)
{
throw new AlfrescoRuntimeException("Unable to update import file.", exception);
}
}
/**
* Get the reader for the source import file
*
* @param source the source import file
* @return the XML pull parser used to read the file
*/
private XmlPullParser getReader(String source)
{
try
{
XmlPullParserFactory factory = XmlPullParserFactory.newInstance(System.getProperty(XmlPullParserFactory.PROPERTY_NAME), null);
factory.setNamespaceAware(true);
XmlPullParser xpp = factory.newPullParser();
xpp.setInput(new FileReader(source));
return xpp;
}
catch (XmlPullParserException exception)
{
throw new AlfrescoRuntimeException("Unable to update import file.", exception);
}
catch (FileNotFoundException fileNotFound)
{
throw new AlfrescoRuntimeException("The source file could not be loaded.", fileNotFound);
}
}
/**
* Get the writer for the import file
*
* @param destination the destination XML import file
* @return the XML writer
*/
private XMLWriter getWriter(String destination)
{
try
{
// Define output format
OutputFormat format = OutputFormat.createPrettyPrint();
format.setNewLineAfterDeclaration(false);
format.setIndentSize(INDENT_SIZE);
format.setEncoding("UTF-8");
return new XMLWriter(new FileOutputStream(destination), format);
}
catch (Exception exception)
{
throw new AlfrescoRuntimeException("Unable to create XML writer.", exception);
}
}
private void outputCurrentElement(XmlPullParser reader, XMLWriter writer, Work work)
throws Exception
{
outputCurrentElement(reader, writer, work, true);
}
private void outputCurrentElement(XmlPullParser reader, XMLWriter writer, Work work, boolean checkForCallbacks)
throws Exception
{
if (checkForCallbacks == false || checkForCallbacks(reader, writer) == false)
{
// Get the name details of the element
String name = reader.getName();
String namespace = reader.getNamespace();
String prefix = reader.getPrefix();
// Sort out namespaces
Map<String, String> nss = new HashMap<String, String>();
int nsStart = reader.getNamespaceCount(reader.getDepth()-1);
int nsEnd = reader.getNamespaceCount(reader.getDepth());
for (int i = nsStart; i < nsEnd; i++)
{
String nsPrefix = reader.getNamespacePrefix(i);
String ns = reader.getNamespaceUri(i);
nss.put(nsPrefix, ns);
}
// Sort out attributes
AttributesImpl attributes = new AttributesImpl();
for (int i = 0; i < reader.getAttributeCount(); i++)
{
String attributeName = reader.getAttributeName(i);
String attributeNamespace = reader.getAttributeNamespace(i);
String attributePrefix = reader.getAttributePrefix(i);
String attributeType = reader.getAttributeType(i);
String attributeValue = reader.getAttributeValue(i);
attributes.addAttribute(attributeNamespace, attributeName, attributePrefix+":"+attributeName, attributeType, attributeValue);
}
// Start the namespace prefixes
for (Map.Entry<String, String> entry : nss.entrySet())
{
writer.startPrefixMapping(entry.getKey(), entry.getValue());
}
// Write the start of the element
writer.startElement(namespace, name, prefix+":"+name, attributes);
// Do the work
work.doWork(reader, writer);
// Write the end of the element
writer.endElement(namespace, name, prefix+":"+name);
// End the namespace prefixes
for (String nsPrefix : nss.keySet())
{
writer.endPrefixMapping(nsPrefix);
}
}
}
private boolean checkForCallbacks(XmlPullParser reader, XMLWriter writer)
throws Exception
{
boolean result = false;
if (reader.getName().equals(NAME_EXPORTER_VERSION) == true)
{
new ImportVersionLabelCallback().doCallback(reader, writer);
result = true;
}
else if (reader.getName().equals(NAME_RULE) == true)
{
if (this.version.startsWith("1.3") == true)
{
new RuleCallback().doCallback(reader, writer);
result = true;
}
}
return result;
}
private interface Work
{
void doWork(XmlPullParser reader, XMLWriter writer)
throws Exception;
}
private class OutputChildren implements Work
{
public void doWork(XmlPullParser reader, XMLWriter writer)
throws Exception
{
// Deal with the contents of the tag
int eventType = reader.getEventType();
while (eventType != XmlPullParser.END_TAG)
{
eventType = reader.next();
if (eventType == XmlPullParser.START_TAG)
{
ImportFileUpdater.this.outputCurrentElement(reader, writer, new OutputChildren());
}
else if (eventType == XmlPullParser.TEXT)
{
// Write the text to the output file
writer.write(reader.getText());
}
}
}
}
@SuppressWarnings("unused")
private class IgnoreChildren implements Work
{
public void doWork(XmlPullParser reader, XMLWriter writer)
throws Exception
{
int eventType = reader.getEventType();
while (eventType != XmlPullParser.END_TAG)
{
eventType = reader.next();
if (eventType == XmlPullParser.START_TAG)
{
doWork(reader, writer);
}
}
}
}
private interface ImportUpdaterCallback
{
void doCallback(XmlPullParser reader, XMLWriter writer)
throws Exception;
}
private class ImportVersionLabelCallback implements ImportUpdaterCallback
{
public void doCallback(XmlPullParser reader, XMLWriter writer)
throws Exception
{
ImportFileUpdater.this.outputCurrentElement(reader, writer,
new Work()
{
public void doWork(XmlPullParser reader, XMLWriter writer) throws Exception
{
reader.next();
ImportFileUpdater.this.version = reader.getText();
writer.write(EXPORT_VERSION);
reader.next();
}
}, false);
}
}
private class RuleCallback implements ImportUpdaterCallback
{
public void doCallback(XmlPullParser reader, XMLWriter writer)
throws Exception
{
// Get the name details of the element
String name = reader.getName();
String namespace = reader.getNamespace();
String prefix = reader.getPrefix();
// Rename the child assoc appropriately
AttributesImpl attributes = new AttributesImpl();
String attributeName = reader.getAttributeName(0);
String attributeNamespace = reader.getAttributeNamespace(0);
String attributePrefix = reader.getAttributePrefix(0);
String attributeType = reader.getAttributeType(0);
String attributeValue = reader.getAttributeValue(0) + GUID.generate();
attributes.addAttribute(attributeNamespace, attributeName, attributePrefix+":"+attributeName, attributeType, attributeValue);
// Output the rules element
writer.startElement(namespace, name, prefix+":"+name, attributes);
int eventType = reader.getEventType();
while (eventType != XmlPullParser.END_TAG)
{
eventType = reader.next();
if (eventType == XmlPullParser.START_TAG)
{
String childName = reader.getName();
if (childName.equals("aspects") == true)
{
ImportFileUpdater.this.outputCurrentElement(reader, writer,
new Work()
{
public void doWork(XmlPullParser reader, XMLWriter writer) throws Exception
{
// Add titled aspect
writer.startElement(
ContentModel.ASPECT_TITLED.getNamespaceURI(),
ContentModel.ASPECT_TITLED.getLocalName(),
NamespaceService.CONTENT_MODEL_PREFIX + ":" + ContentModel.ASPECT_TITLED.getLocalName(),
new AttributesImpl());
writer.endElement(
ContentModel.ASPECT_TITLED.getNamespaceURI(),
ContentModel.ASPECT_TITLED.getLocalName(),
NamespaceService.CONTENT_MODEL_PREFIX + ":" + ContentModel.ASPECT_TITLED.getLocalName());
// Read the rest of the elements and output
int eventType = reader.getEventType();
while (eventType != XmlPullParser.END_TAG)
{
eventType = reader.next();
if (eventType == XmlPullParser.START_TAG)
{
ImportFileUpdater.this.outputCurrentElement(reader, writer, new OutputChildren());
}
}
}
}, false);
}
else if (childName.equals("properties") == true)
{
ImportFileUpdater.this.outputCurrentElement(reader, writer,
new Work()
{
public void doWork(XmlPullParser reader, XMLWriter writer) throws Exception
{
int eventType = reader.getEventType();
while (eventType != XmlPullParser.END_TAG)
{
eventType = reader.next();
if (eventType == XmlPullParser.START_TAG)
{
String propName = reader.getName();
if (propName.equals("actionDescription") == true)
{
writer.startElement(
ContentModel.PROP_DESCRIPTION.getNamespaceURI(),
ContentModel.PROP_DESCRIPTION.getLocalName(),
NamespaceService.CONTENT_MODEL_PREFIX + ":" + ContentModel.PROP_DESCRIPTION.getLocalName(),
new AttributesImpl());
// Output the value within
new OutputChildren().doWork(reader, writer);
writer.endElement(
ContentModel.PROP_DESCRIPTION.getNamespaceURI(),
ContentModel.PROP_DESCRIPTION.getLocalName(),
NamespaceService.CONTENT_MODEL_PREFIX + ":" + ContentModel.PROP_DESCRIPTION.getLocalName());
eventType = reader.next();
}
else if (propName.equals("actionTitle") == true)
{
writer.startElement(
ContentModel.PROP_TITLE.getNamespaceURI(),
ContentModel.PROP_TITLE.getLocalName(),
NamespaceService.CONTENT_MODEL_PREFIX + ":" + ContentModel.PROP_TITLE.getLocalName(),
new AttributesImpl());
// Output the value within
new OutputChildren().doWork(reader, writer);
writer.endElement(
ContentModel.PROP_TITLE.getNamespaceURI(),
ContentModel.PROP_TITLE.getLocalName(),
NamespaceService.CONTENT_MODEL_PREFIX + ":" + ContentModel.PROP_TITLE.getLocalName());
eventType = reader.next();
}
else if (propName.equals("executeAsynchronously") == true)
{
writer.startElement(
RuleModel.PROP_EXECUTE_ASYNC.getNamespaceURI(),
RuleModel.PROP_EXECUTE_ASYNC.getLocalName(),
RuleModel.RULE_MODEL_PREFIX + ":" + RuleModel.PROP_EXECUTE_ASYNC.getLocalName(),
new AttributesImpl());
// Output the value within
new OutputChildren().doWork(reader, writer);
writer.endElement(
RuleModel.PROP_EXECUTE_ASYNC.getNamespaceURI(),
RuleModel.PROP_EXECUTE_ASYNC.getLocalName(),
RuleModel.RULE_MODEL_PREFIX + ":" + RuleModel.PROP_EXECUTE_ASYNC.getLocalName());
eventType = reader.next();
}
else if (propName.equals("ruleType") == true)
{
ImportFileUpdater.this.outputCurrentElement(reader, writer,
new Work()
{
public void doWork(XmlPullParser reader, XMLWriter writer) throws Exception
{
// Output the elements that contain a multi values property
writer.startElement(NamespaceService.REPOSITORY_VIEW_1_0_URI, "values", "view:values", new AttributesImpl());
writer.startElement(NamespaceService.REPOSITORY_VIEW_1_0_URI, "value", "view:value", new AttributesImpl());
// Output the value within
new OutputChildren().doWork(reader, writer);
// End the multi values elements
writer.endElement(NamespaceService.REPOSITORY_VIEW_PREFIX, "value", "view:value");
writer.endElement(NamespaceService.REPOSITORY_VIEW_PREFIX, "values", "view:values");
}
}, false);
}
else if (propName.equals("definitionName") == true)
{
// Skip past next end
while (eventType != XmlPullParser.END_TAG)
{
eventType = reader.next();
}
eventType = reader.next();
}
else
{
ImportFileUpdater.this.outputCurrentElement(reader, writer, new OutputChildren());
}
}
}
// Output value for the disabled property
writer.startElement(
RuleModel.PROP_DISABLED.getNamespaceURI(),
RuleModel.PROP_DISABLED.getLocalName(),
RuleModel.RULE_MODEL_PREFIX + ":" + RuleModel.PROP_DISABLED.getLocalName(),
new AttributesImpl());
writer.write("false");
writer.endElement(
RuleModel.PROP_DISABLED.getNamespaceURI(),
RuleModel.PROP_DISABLED.getLocalName(),
RuleModel.RULE_MODEL_PREFIX + ":" + RuleModel.PROP_DISABLED.getLocalName());
}
}, false);
}
else if (childName.equals("associations") == true)
{
ImportFileUpdater.this.outputCurrentElement(reader, writer,
new Work()
{
public void doWork(XmlPullParser reader, XMLWriter writer) throws Exception
{
// <rule:action>
writer.startElement(
RuleModel.ASSOC_ACTION.getNamespaceURI(),
RuleModel.ASSOC_ACTION.getLocalName(),
RuleModel.RULE_MODEL_PREFIX + ":" + RuleModel.ASSOC_ACTION.getLocalName(),
new AttributesImpl());
// <act:compositeaction view:childName="rule:action">
AttributesImpl attributes = new AttributesImpl();
attributes.addAttribute(NamespaceService.REPOSITORY_VIEW_1_0_URI, "childName", "view:childName", null, "rule:action");
writer.startElement(
ActionModel.TYPE_COMPOSITE_ACTION.getNamespaceURI(),
ActionModel.TYPE_COMPOSITE_ACTION.getLocalName(),
ActionModel.ACTION_MODEL_PREFIX+ ":" + ActionModel.TYPE_COMPOSITE_ACTION.getLocalName(),
attributes);
// <view:properties>
writer.startElement(
NamespaceService.REPOSITORY_VIEW_1_0_URI,
"properties",
"view:properties",
new AttributesImpl());
// <act:definitionName>composite-action</definitionName>
writer.startElement(
ActionModel.PROP_DEFINITION_NAME.getNamespaceURI(),
ActionModel.PROP_DEFINITION_NAME.getLocalName(),
ActionModel.ACTION_MODEL_PREFIX + ":" + ActionModel.PROP_DEFINITION_NAME.getLocalName(),
new AttributesImpl());
writer.write("composite-action");
writer.endElement(
ActionModel.PROP_DEFINITION_NAME.getNamespaceURI(),
ActionModel.PROP_DEFINITION_NAME.getLocalName(),
ActionModel.ACTION_MODEL_PREFIX + ":" + ActionModel.PROP_DEFINITION_NAME.getLocalName());
// </view:properties>
writer.endElement(
NamespaceService.REPOSITORY_VIEW_1_0_URI,
"properties",
"view:properties");
// <view:association>
writer.startElement(
NamespaceService.REPOSITORY_VIEW_1_0_URI,
"associations",
"view:associations",
new AttributesImpl());
// Output the association details
new OutputChildren().doWork(reader, writer);
// </view:association>
writer.endElement(
NamespaceService.REPOSITORY_VIEW_1_0_URI,
"associations",
"view:associations");
// </act:compositeaction>
writer.endElement(
ActionModel.TYPE_COMPOSITE_ACTION.getNamespaceURI(),
ActionModel.TYPE_COMPOSITE_ACTION.getLocalName(),
ActionModel.ACTION_MODEL_PREFIX+ ":" + ActionModel.TYPE_COMPOSITE_ACTION.getLocalName());
// </rule:action>
writer.endElement(
RuleModel.ASSOC_ACTION.getNamespaceURI(),
RuleModel.ASSOC_ACTION.getLocalName(),
RuleModel.RULE_MODEL_PREFIX + ":" + RuleModel.ASSOC_ACTION.getLocalName());
}
}, false);
}
else
{
// Output anything else that might be hanging around
ImportFileUpdater.this.outputCurrentElement(reader, writer, new OutputChildren());
}
}
}
// End the rules element
writer.endElement(namespace, name, prefix+":"+name);
}
}
public static void main(String[] args)
{
if (args.length == 2)
{
ImportFileUpdater util = new ImportFileUpdater();
util.updateImportFile(args[0], args[1]);
}
else
{
System.out.println(" ImportFileUpdater <destination> <source>");
System.out.println(" source - 1.3 import file name to be updated");
System.out.println(" destination - name of the generated 1.4 import file");
}
}
}
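Besides the command-line entry point above, the updater can be driven programmatically; this hypothetical snippet only illustrates the argument order, and the file names are invented:
// Hypothetical usage; arguments are (source, destination).
public class ImportFileUpdaterExample
{
    public static void main(String[] args)
    {
        ImportFileUpdater updater = new ImportFileUpdater();
        updater.updateImportFile("export-1.3.xml", "export-1.4.xml");
    }
}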

View File

@@ -25,6 +25,7 @@ import java.util.Map;
import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.policy.ClassPolicyDelegate;
import org.alfresco.repo.policy.JavaBehaviour;
@@ -43,6 +44,7 @@ import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.CopyService;
import org.alfresco.service.cmr.repository.CopyServiceException;
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
@@ -65,8 +67,12 @@ import org.alfresco.util.ParameterCheck;
*/
public class CopyServiceImpl implements CopyService
{
/** I18N labels */
private String COPY_OF_LABEL = "copy_service.copy_of_label";
/** The node service */
private NodeService nodeService;
private NodeService internalNodeService;
/** The dictionary service*/
private DictionaryService dictionaryService;
@@ -99,6 +105,16 @@ public class CopyServiceImpl implements CopyService
{
this.nodeService = nodeService;
}
/**
* Sets the internal node service
*
* @param internalNodeService the internal node service
*/
public void setInternalNodeService(NodeService internalNodeService)
{
this.internalNodeService = internalNodeService;
}
/**
* Sets the dictionary service
@@ -233,7 +249,32 @@ public class CopyServiceImpl implements CopyService
return copy;
}
public NodeRef copyAndRename(NodeRef sourceNodeRef, NodeRef destinationParent, QName destinationAssocTypeQName, QName destinationQName, boolean copyChildren)
{
// Make a note of the source name and do the copy
String sourceName = (String)this.internalNodeService.getProperty(sourceNodeRef, ContentModel.PROP_NAME);
NodeRef copy = copy(sourceNodeRef, destinationParent, destinationAssocTypeQName, destinationQName, copyChildren);
// Do the rename, iterating until a non-duplicate name is found
boolean bDone = false;
while (bDone == false)
{
try
{
this.internalNodeService.setProperty(copy, ContentModel.PROP_NAME, sourceName);
bDone = true;
}
catch(DuplicateChildNodeNameException exception)
{
sourceName = I18NUtil.getMessage(COPY_OF_LABEL, sourceName);
}
}
// Return the copy
return copy;
}
/**
* Invokes the copy complete policy for the node reference provided
*

View File

@@ -104,6 +104,7 @@ public class CopyServiceImplTest extends BaseSpringTest
private static final QName TEST_MANDATORY_ASPECT_QNAME = QName.createQName(TEST_TYPE_NAMESPACE, "testMandatoryAspect");
private static final QName PROP5_QNAME_MANDATORY = QName.createQName(TEST_TYPE_NAMESPACE, "prop5Mandatory");
private static final String TEST_NAME = "testName";
private static final String TEST_VALUE_1 = "testValue1";
private static final String TEST_VALUE_2 = "testValue2";
private static final String TEST_VALUE_3 = "testValue3";
@@ -239,6 +240,7 @@ public class CopyServiceImplTest extends BaseSpringTest
private Map<QName, Serializable> createTypePropertyBag()
{
Map<QName, Serializable> result = new HashMap<QName, Serializable>();
result.put(ContentModel.PROP_NAME, TEST_NAME);
result.put(PROP1_QNAME_MANDATORY, TEST_VALUE_1);
result.put(PROP2_QNAME_OPTIONAL, TEST_VALUE_2);
result.put(PROP5_QNAME_MANDATORY, TEST_VALUE_3);
@@ -624,6 +626,31 @@ public class CopyServiceImplTest extends BaseSpringTest
assertNotNull(value);
assertEquals(nodeTwoCopy, value);
}
public void testCopyAndRename()
{
// Check a normal copy with no dup restrictions
NodeRef copy = this.copyService.copyAndRename(
this.sourceNodeRef,
this.rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}copyAssoc"),
false);
checkCopiedNode(this.sourceNodeRef, copy, true, true, false);
assertTrue(TEST_NAME.equals(this.nodeService.getProperty(copy, ContentModel.PROP_NAME)));
// Create a folder and content node
Map<QName, Serializable> propsFolder = new HashMap<QName, Serializable>(1);
propsFolder.put(ContentModel.PROP_NAME, "tempFolder");
NodeRef folderNode = this.nodeService.createNode(this.rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{test}tempFolder"), ContentModel.TYPE_FOLDER, propsFolder).getChildRef();
Map<QName, Serializable> props = new HashMap<QName, Serializable>(1);
props.put(ContentModel.PROP_NAME, TEST_NAME);
NodeRef contentNode = this.nodeService.createNode(folderNode, ContentModel.ASSOC_CONTAINS, QName.createQName("{test}renametest"), ContentModel.TYPE_CONTENT, props).getChildRef();
// Now copy the content node with the duplicate name restriction
NodeRef contentCopy = this.copyService.copy(contentNode, folderNode, ContentModel.ASSOC_CONTAINS, QName.createQName("{test}bobbins"), false);
assertFalse(TEST_NAME.equals(this.nodeService.getProperty(contentCopy, ContentModel.PROP_NAME)));
}
/**
* Check that the copied node contains the state we are expecting

View File

@@ -41,16 +41,12 @@ import org.alfresco.service.license.LicenseService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.AbstractLifecycleBean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.core.io.Resource;
@@ -59,12 +55,10 @@ import org.springframework.core.io.Resource;
*
* @author David Caruana
*/
public class DescriptorServiceImpl implements DescriptorService, ApplicationListener, InitializingBean, ApplicationContextAware, DisposableBean
public class DescriptorServiceImpl extends AbstractLifecycleBean implements DescriptorService, InitializingBean
{
private static Log logger = LogFactory.getLog(DescriptorServiceImpl.class);
private ApplicationContext applicationContext;
private Properties serverProperties;
private ImporterBootstrap systemBootstrap;
@@ -78,14 +72,6 @@ public class DescriptorServiceImpl implements DescriptorService, ApplicationList
private Descriptor installedRepoDescriptor;
/* (non-Javadoc)
* @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
*/
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
{
this.applicationContext = applicationContext;
}
/**
* Sets the server descriptor from a resource file
*
@@ -163,36 +149,36 @@ public class DescriptorServiceImpl implements DescriptorService, ApplicationList
return (licenseService == null) ? null : licenseService.getLicense();
}
/**
* @param event
*/
public void onApplicationEvent(ApplicationEvent event)
@Override
protected void onBootstrap(ApplicationEvent event)
{
if (event instanceof ContextRefreshedEvent)
// initialise the repository descriptor
// note: this requires that the repository schema has already been initialised
TransactionWork<Descriptor> createDescriptorWork = new TransactionUtil.TransactionWork<Descriptor>()
{
// initialise the repository descriptor
// note: this requires that the repository schema has already been initialised
TransactionWork<Descriptor> createDescriptorWork = new TransactionUtil.TransactionWork<Descriptor>()
public Descriptor doWork()
{
public Descriptor doWork()
{
// initialise license service (if installed)
initialiseLicenseService();
// verify license, but only if license component is installed
licenseService.verifyLicense();
// persist the server descriptor values
updateCurrentRepositoryDescriptor(serverDescriptor);
// initialise license service (if installed)
initialiseLicenseService();
// verify license, but only if license component is installed
licenseService.verifyLicense();
// persist the server descriptor values
updateCurrentRepositoryDescriptor(serverDescriptor);
// return the repository installed descriptor
return createInstalledRepositoryDescriptor();
}
};
installedRepoDescriptor = TransactionUtil.executeInUserTransaction(transactionService, createDescriptorWork);
}
// return the repository installed descriptor
return createInstalledRepositoryDescriptor();
}
};
installedRepoDescriptor = TransactionUtil.executeInUserTransaction(transactionService, createDescriptorWork);
}
@Override
protected void onShutdown(ApplicationEvent event)
{
}
/**
* Initialise Descriptors
*/
@@ -202,13 +188,6 @@ public class DescriptorServiceImpl implements DescriptorService, ApplicationList
serverDescriptor = createServerDescriptor();
}
/**
* Destruction hook
*/
public void destroy() throws Exception
{
}
/**
* Create server descriptor
*
@@ -358,7 +337,7 @@ public class DescriptorServiceImpl implements DescriptorService, ApplicationList
// be declaratively taken out in an installed environment.
Class licenseComponentClass = Class.forName("org.alfresco.license.LicenseComponent");
Constructor constructor = licenseComponentClass.getConstructor(new Class[] { ApplicationContext.class} );
licenseService = (LicenseService)constructor.newInstance(new Object[] { applicationContext });
licenseService = (LicenseService)constructor.newInstance(new Object[] { getApplicationContext() });
}
catch (ClassNotFoundException e)
{
@@ -766,4 +745,5 @@ public class DescriptorServiceImpl implements DescriptorService, ApplicationList
return serverProperties.getProperty(key, "");
}
}
}

View File

@@ -23,11 +23,10 @@ import java.util.Map;
import org.alfresco.service.descriptor.Descriptor;
import org.alfresco.service.descriptor.DescriptorService;
import org.alfresco.service.license.LicenseDescriptor;
import org.alfresco.util.AbstractLifecycleBean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
/**
@@ -35,7 +34,7 @@ import org.springframework.context.event.ContextRefreshedEvent;
*
* @author davidc
*/
public class DescriptorStartupLog implements ApplicationListener
public class DescriptorStartupLog extends AbstractLifecycleBean
{
// Logger
private static final Log logger = LogFactory.getLog(DescriptorService.class);
@@ -52,82 +51,6 @@ public class DescriptorStartupLog implements ApplicationListener
}
/**
* @param event
*/
public void onApplicationEvent(ApplicationEvent event)
{
if (event instanceof ContextRefreshedEvent)
{
//
// log output of VM stats
//
Map properties = System.getProperties();
String version = (properties.get("java.runtime.version") == null) ? "unknown" : (String)properties.get("java.runtime.version");
long maxHeap = Runtime.getRuntime().maxMemory();
float maxHeapMB = maxHeap / 1024l;
maxHeapMB = maxHeapMB / 1024l;
if (logger.isInfoEnabled())
{
logger.info(String.format("Alfresco JVM - v%s; maximum heap size %.3fMB", version, maxHeapMB));
}
if (logger.isWarnEnabled())
{
if (version.startsWith("1.2") || version.startsWith("1.3") || version.startsWith("1.4"))
{
logger.warn(String.format("Alfresco JVM - WARNING - v1.5 is required; currently using v%s", version));
}
if (maxHeapMB < 500)
{
logger.warn(String.format("Alfresco JVM - WARNING - maximum heap size %.3fMB is less than recommended 512MB", maxHeapMB));
}
}
// Log License Descriptors (if applicable)
LicenseDescriptor license = descriptorService.getLicenseDescriptor();
if (license != null && logger.isInfoEnabled())
{
String subject = license.getSubject();
String msg = "Alfresco license: " + subject;
String holder = getHolderOrganisation(license.getHolder());
if (holder != null)
{
msg += " granted to " + holder;
}
Date validUntil = license.getValidUntil();
if (validUntil != null)
{
Integer days = license.getDays();
Integer remainingDays = license.getRemainingDays();
msg += " limited to " + days + " days expiring " + validUntil + " (" + remainingDays + " days remaining)";
}
else
{
msg += " (does not expire)";
}
logger.info(msg);
}
// Log Repository Descriptors
if (logger.isInfoEnabled())
{
Descriptor serverDescriptor = descriptorService.getServerDescriptor();
Descriptor installedRepoDescriptor = descriptorService.getInstalledRepositoryDescriptor();
String serverEdition = serverDescriptor.getEdition();
String serverVersion = serverDescriptor.getVersion();
int serverSchemaVersion = serverDescriptor.getSchema();
String installedRepoVersion = installedRepoDescriptor.getVersion();
int installedSchemaVersion = installedRepoDescriptor.getSchema();
logger.info(String.format("Alfresco started (%s): Current version %s schema %d - Installed version %s schema %d",
serverEdition, serverVersion, serverSchemaVersion, installedRepoVersion, installedSchemaVersion));
}
}
}
/**
* Get Organisation from Principal
*
@@ -156,5 +79,83 @@ public class DescriptorStartupLog implements ApplicationListener
return holder;
}
@Override
protected void onBootstrap(ApplicationEvent event)
{
//
// log output of VM stats
//
Map properties = System.getProperties();
String version = (properties.get("java.runtime.version") == null) ? "unknown" : (String)properties.get("java.runtime.version");
long maxHeap = Runtime.getRuntime().maxMemory();
float maxHeapMB = maxHeap / 1024l;
maxHeapMB = maxHeapMB / 1024l;
if (logger.isInfoEnabled())
{
logger.info(String.format("Alfresco JVM - v%s; maximum heap size %.3fMB", version, maxHeapMB));
}
if (logger.isWarnEnabled())
{
if (version.startsWith("1.2") || version.startsWith("1.3") || version.startsWith("1.4"))
{
logger.warn(String.format("Alfresco JVM - WARNING - v1.5 is required; currently using v%s", version));
}
if (maxHeapMB < 500)
{
logger.warn(String.format("Alfresco JVM - WARNING - maximum heap size %.3fMB is less than recommended 512MB", maxHeapMB));
}
}
// Log License Descriptors (if applicable)
LicenseDescriptor license = descriptorService.getLicenseDescriptor();
if (license != null && logger.isInfoEnabled())
{
String subject = license.getSubject();
String msg = "Alfresco license: " + subject;
String holder = getHolderOrganisation(license.getHolder());
if (holder != null)
{
msg += " granted to " + holder;
}
Date validUntil = license.getValidUntil();
if (validUntil != null)
{
Integer days = license.getDays();
Integer remainingDays = license.getRemainingDays();
msg += " limited to " + days + " days expiring " + validUntil + " (" + remainingDays + " days remaining)";
}
else
{
msg += " (does not expire)";
}
logger.info(msg);
}
// Log Repository Descriptors
if (logger.isInfoEnabled())
{
Descriptor serverDescriptor = descriptorService.getServerDescriptor();
Descriptor installedRepoDescriptor = descriptorService.getInstalledRepositoryDescriptor();
String serverEdition = serverDescriptor.getEdition();
String serverVersion = serverDescriptor.getVersion();
int serverSchemaVersion = serverDescriptor.getSchema();
String installedRepoVersion = installedRepoDescriptor.getVersion();
int installedSchemaVersion = installedRepoDescriptor.getSchema();
logger.info(String.format("Alfresco started (%s): Current version %s schema %d - Installed version %s schema %d",
serverEdition, serverVersion, serverSchemaVersion, installedRepoVersion, installedSchemaVersion));
}
}
@Override
protected void onShutdown(ApplicationEvent event)
{
// NOOP
}
}

View File

@@ -16,6 +16,7 @@
*/
package org.alfresco.repo.dictionary;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -272,5 +273,26 @@ public class DictionaryDAOTest extends TestCase
assertFalse(test5);
}
public void testPropertyOverride()
{
TypeDefinition type1 = service.getType(QName.createQName(TEST_URL, "overridetype1"));
Map<QName, PropertyDefinition> props1 = type1.getProperties();
PropertyDefinition prop1 = props1.get(QName.createQName(TEST_URL, "propoverride"));
String def1 = prop1.getDefaultValue();
assertEquals("one", def1);
TypeDefinition type2 = service.getType(QName.createQName(TEST_URL, "overridetype2"));
Map<QName, PropertyDefinition> props2 = type2.getProperties();
PropertyDefinition prop2 = props2.get(QName.createQName(TEST_URL, "propoverride"));
String def2 = prop2.getDefaultValue();
assertEquals("two", def2);
TypeDefinition type3 = service.getType(QName.createQName(TEST_URL, "overridetype3"));
Map<QName, PropertyDefinition> props3 = type3.getProperties();
PropertyDefinition prop3 = props3.get(QName.createQName(TEST_URL, "propoverride"));
String def3 = prop3.getDefaultValue();
assertEquals("three", def3);
}
}

View File

@@ -43,6 +43,8 @@ public class TestModel
bootstrapModels.add("alfresco/model/systemModel.xml");
bootstrapModels.add("alfresco/model/contentModel.xml");
bootstrapModels.add("alfresco/model/applicationModel.xml");
bootstrapModels.add("alfresco/model/bpmModel.xml");
bootstrapModels.add("alfresco/workflow/workflowModel.xml");
// include models specified on command line
for (String arg: args)

View File

@@ -188,6 +188,33 @@
</properties>
</type>
<type name="test:overridetype1">
<properties>
<property name="test:propoverride">
<type>d:text</type>
<default>one</default>
</property>
</properties>
</type>
<type name="test:overridetype2">
<parent>test:overridetype1</parent>
<overrides>
<property name="test:propoverride">
<default>two</default>
</property>
</overrides>
</type>
<type name="test:overridetype3">
<parent>test:overridetype2</parent>
<overrides>
<property name="test:propoverride">
<default>three</default>
</property>
</overrides>
</type>
</types>
<aspects>

View File

@@ -144,4 +144,14 @@
ace.authority.recipient = :authorityRecipient
</query>
<query name="permission.patch.GetAccessControlEntriesToChangePermissionOn" >
select
entry
from
org.alfresco.repo.domain.hibernate.DbAccessControlEntryImpl entry
where
entry.permission.typeQname = :oldTypeQName and
entry.permission.name = :oldName
</query>
</hibernate-mapping>

View File

@@ -70,6 +70,14 @@
status.key.identifier = :identifier
</query>
<query name="txn.GetLastTxnId">
select
max(txn.id)
from
org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
join status.transaction as txn
</query>
<query name="txn.CountTransactions">
select
count(txn.id)
@@ -90,6 +98,21 @@
]]>
</query>
<query name="txn.GetNextRemoteTxns">
<![CDATA[
select
txn
from
org.alfresco.repo.domain.hibernate.TransactionImpl as txn
join txn.server as server
where
txn.id > :lastTxnId and
server.ipAddress != :serverIpAddress
order by
txn.id
]]>
</query>
<query name="txn.GetTxnUpdateCountForStore">
select
count(status.key.guid)
@@ -98,9 +121,7 @@
join status.transaction as txn
where
txn.id = :txnId and
status.node is not null and
status.key.protocol = :protocol and
status.key.identifier = :identifier
status.node is not null
</query>
<query name="txn.GetTxnDeleteCountForStore">
@@ -111,9 +132,7 @@
join status.transaction as txn
where
txn.id = :txnId and
status.node is null and
status.key.protocol = :protocol and
status.key.identifier = :identifier
status.node is null
</query>
<query name="txn.GetTxnChangesForStore">

View File

@@ -25,7 +25,9 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Writer;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -33,6 +35,9 @@ import java.util.List;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch;
import org.alfresco.repo.content.filestore.FileContentWriter;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.AbstractLifecycleBean;
import org.alfresco.util.TempFileProvider;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -45,8 +50,6 @@ import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
@@ -58,12 +61,13 @@ import org.springframework.orm.hibernate3.LocalSessionFactoryBean;
*
* @author Derek Hulley
*/
public class SchemaBootstrap implements ApplicationListener
public class SchemaBootstrap extends AbstractLifecycleBean
{
/** The placeholder for the configured <code>Dialect</code> class name: <b>${db.script.dialect}</b> */
private static final String PLACEHOLDER_SCRIPT_DIALECT = "\\$\\{db\\.script\\.dialect\\}";
private static final String MSG_EXECUTING_SCRIPT = "schema.update.msg.executing_script";
private static final String ERR_STATEMENT_FAILED = "schema.update.err.statement_failed";
private static final String ERR_UPDATE_FAILED = "schema.update.err.update_failed";
private static final String ERR_VALIDATION_FAILED = "schema.update.err.validation_failed";
private static final String ERR_SCRIPT_NOT_RUN = "schema.update.err.update_script_not_run";
@@ -148,58 +152,6 @@ public class SchemaBootstrap implements ApplicationListener
this.applyUpdateScriptPatches = scriptPatches;
}
public void onApplicationEvent(ApplicationEvent event)
{
if (!(event instanceof ContextRefreshedEvent))
{
// only work on startup
return;
}
// do everything in a transaction
Session session = getLocalSessionFactory().openSession();
Transaction transaction = session.beginTransaction();
try
{
// make sure that we don't autocommit
Connection connection = session.connection();
connection.setAutoCommit(false);
Configuration cfg = localSessionFactory.getConfiguration();
// dump the schema, if required
if (schemaOuputFilename != null)
{
File schemaOutputFile = new File(schemaOuputFilename);
dumpSchemaCreate(cfg, schemaOutputFile);
}
// update the schema, if required
if (updateSchema)
{
updateSchema(cfg, session, connection);
}
// verify that all patches have been applied correctly
checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check scripts
checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, false); // check scripts
// all done successfully
transaction.commit();
}
catch (Throwable e)
{
try { transaction.rollback(); } catch (Throwable ee) {}
if (updateSchema)
{
throw new AlfrescoRuntimeException(ERR_UPDATE_FAILED, e);
}
else
{
throw new AlfrescoRuntimeException(ERR_VALIDATION_FAILED, e);
}
}
}
private void dumpSchemaCreate(Configuration cfg, File schemaOutputFile)
{
// if the file exists, delete it
@@ -220,39 +172,78 @@ public class SchemaBootstrap implements ApplicationListener
return (SessionFactory) localSessionFactory.getObject();
}
private static class NoSchemaException extends Exception
{
private static final long serialVersionUID = 5574280159910824660L;
}
/**
* @return Returns the number of applied patches
*/
private int countAppliedPatches(Connection connection) throws Exception
{
Statement stmt = connection.createStatement();
DatabaseMetaData dbMetadata = connection.getMetaData();
ResultSet tableRs = dbMetadata.getTables(null, null, "%", null);
boolean newPatchTable = false;
boolean oldPatchTable = false;
try
{
ResultSet rs = stmt.executeQuery("select count(id) from alf_applied_patch");
rs.next();
int count = rs.getInt(1);
return count;
}
catch (Throwable e)
{
// we'll try another table name
while (tableRs.next())
{
String tableName = tableRs.getString("TABLE_NAME");
if (tableName.equalsIgnoreCase("applied_patch"))
{
oldPatchTable = true;
break;
}
else if (tableName.equalsIgnoreCase("alf_applied_patch"))
{
newPatchTable = true;
break;
}
}
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
try { tableRs.close(); } catch (Throwable e) {e.printStackTrace(); }
}
// for pre-1.4 databases, the table was named differently
stmt = connection.createStatement();
try
if (newPatchTable)
{
ResultSet rs = stmt.executeQuery("select count(id) from applied_patch");
rs.next();
int count = rs.getInt(1);
return count;
Statement stmt = connection.createStatement();
try
{
ResultSet rs = stmt.executeQuery("select count(id) from alf_applied_patch");
rs.next();
int count = rs.getInt(1);
return count;
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
}
finally
else if (oldPatchTable)
{
try { stmt.close(); } catch (Throwable e) {}
// found the old style table name
Statement stmt = connection.createStatement();
try
{
ResultSet rs = stmt.executeQuery("select count(id) from applied_patch");
rs.next();
int count = rs.getInt(1);
return count;
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
}
else
{
// The applied patches table is not present
throw new NoSchemaException();
}
}
@@ -308,22 +299,21 @@ public class SchemaBootstrap implements ApplicationListener
{
countAppliedPatches(connection);
}
catch (Throwable e)
catch (NoSchemaException e)
{
create = true;
}
// Get the dialect
final Dialect dialect = Dialect.getDialect(cfg.getProperties());
String dialectStr = dialect.getClass().getName();
if (create)
{
// Get the dialect
final Dialect dialect = Dialect.getDialect(cfg.getProperties());
String dialectStr = dialect.getClass().getName();
// the applied patch table is missing - we assume that all other tables are missing
// perform a full update using Hibernate-generated statements
File tempFile = TempFileProvider.createTempFile("AlfrescoSchemaCreate-" + dialectStr + "-", ".sql");
dumpSchemaCreate(cfg, tempFile);
FileInputStream tempInputStream = new FileInputStream(tempFile);
executeScriptFile(cfg, connection, tempInputStream, tempFile.getPath());
executeScriptFile(cfg, connection, tempFile, tempFile.getPath());
// execute post-create scripts (not patches)
for (String scriptUrl : this.postCreateScriptUrls)
{
@@ -340,12 +330,11 @@ public class SchemaBootstrap implements ApplicationListener
Writer writer = null;
try
{
final Dialect dialect = Dialect.getDialect(cfg.getProperties());
DatabaseMetadata metadata = new DatabaseMetadata(connection, dialect);
String[] sqls = cfg.generateSchemaUpdateScript(dialect, metadata);
if (sqls.length > 0)
{
tempFile = TempFileProvider.createTempFile("AlfrescoSchemaUpdate", ".sql");
tempFile = TempFileProvider.createTempFile("AlfrescoSchemaUpdate-" + dialectStr + "-", ".sql");
writer = new BufferedWriter(new FileWriter(tempFile));
for (String sql : sqls)
{
@@ -364,8 +353,7 @@ public class SchemaBootstrap implements ApplicationListener
// execute if there were changes raised by Hibernate
if (tempFile != null)
{
InputStream tempInputStream = new FileInputStream(tempFile);
executeScriptFile(cfg, connection, tempInputStream, tempFile.getPath());
executeScriptFile(cfg, connection, tempFile, tempFile.getPath());
}
}
}
@@ -414,14 +402,27 @@ public class SchemaBootstrap implements ApplicationListener
private void executeScriptUrl(Configuration cfg, Connection connection, String scriptUrl) throws Exception
{
Dialect dialect = Dialect.getDialect(cfg.getProperties());
String dialectStr = dialect.getClass().getName();
InputStream scriptInputStream = getScriptInputStream(dialect.getClass(), scriptUrl);
// check that it exists
if (scriptInputStream == null)
{
throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_FOUND, scriptUrl);
}
// write the script to a temp location for future and failure reference
File tempFile = null;
try
{
tempFile = TempFileProvider.createTempFile("AlfrescoSchemaUpdate-" + dialectStr + "-", ".sql");
ContentWriter writer = new FileContentWriter(tempFile);
writer.putContent(scriptInputStream);
}
finally
{
try { scriptInputStream.close(); } catch (Throwable e) {} // usually a duplicate close
}
// now execute it
executeScriptFile(cfg, connection, scriptInputStream, scriptUrl);
executeScriptFile(cfg, connection, tempFile, scriptUrl);
}
/**
@@ -463,11 +464,12 @@ public class SchemaBootstrap implements ApplicationListener
private void executeScriptFile(
Configuration cfg,
Connection connection,
InputStream scriptInputStream,
File scriptFile,
String scriptUrl) throws Exception
{
logger.info(I18NUtil.getMessage(MSG_EXECUTING_SCRIPT, scriptUrl));
InputStream scriptInputStream = new FileInputStream(scriptFile);
BufferedReader reader = new BufferedReader(new InputStreamReader(scriptInputStream, "UTF8"));
try
{
@@ -512,21 +514,9 @@ public class SchemaBootstrap implements ApplicationListener
// execute, if required
if (execute)
{
Statement stmt = connection.createStatement();
try
{
sql = sb.toString();
if (logger.isDebugEnabled())
{
logger.debug("Executing statment: " + sql);
}
stmt.execute(sql);
sb = new StringBuilder(1024);
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
sql = sb.toString();
executeStatement(connection, sql, line, scriptFile);
sb = new StringBuilder(1024);
}
}
}
@@ -536,4 +526,85 @@ public class SchemaBootstrap implements ApplicationListener
try { scriptInputStream.close(); } catch (Throwable e) {}
}
}
/**
* Execute the given SQL statement, reporting the statement, source file and line
* number if execution fails.
*/
private void executeStatement(Connection connection, String sql, int line, File file) throws Exception
{
Statement stmt = connection.createStatement();
try
{
if (logger.isDebugEnabled())
{
logger.debug("Executing statment: " + sql);
}
stmt.execute(sql);
}
catch (SQLException e)
{
String msg = I18NUtil.getMessage(ERR_STATEMENT_FAILED, sql, e.getMessage(), file.getAbsolutePath(), line);
// report the failing statement with its source location before rethrowing
logger.error(msg);
throw e;
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
}
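// Hedged sketch (not part of this changeset) of the script execution pattern used by
// executeScriptFile/executeStatement above: statements are accumulated line by line,
// executed one at a time, and a failure is reported with the originating file and line
// number before being rethrown. Comment handling and i18n are omitted; all names here
// are illustrative only.
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

public class SqlScriptRunnerExample
{
    public static void run(Connection connection, File scriptFile) throws Exception
    {
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(scriptFile), "UTF8"));
        try
        {
            StringBuilder sb = new StringBuilder(1024);
            String line;
            int lineNumber = 0;
            while ((line = reader.readLine()) != null)
            {
                lineNumber++;
                line = line.trim();
                if (line.length() == 0)
                {
                    continue;
                }
                if (line.endsWith(";"))
                {
                    // statement complete - execute it without the trailing semicolon
                    sb.append(line.substring(0, line.length() - 1));
                    String sql = sb.toString();
                    Statement stmt = connection.createStatement();
                    try
                    {
                        stmt.execute(sql);
                    }
                    catch (SQLException e)
                    {
                        // report the statement, file and line before rethrowing
                        System.err.println("Statement failed at " + scriptFile.getAbsolutePath()
                                + ":" + lineNumber + " - " + sql);
                        throw e;
                    }
                    finally
                    {
                        try { stmt.close(); } catch (Throwable e) {}
                    }
                    sb = new StringBuilder(1024);
                }
                else
                {
                    sb.append(line).append(' ');
                }
            }
        }
        finally
        {
            try { reader.close(); } catch (Throwable e) {}
        }
    }
}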
@Override
protected void onBootstrap(ApplicationEvent event)
{
// do everything in a transaction
Session session = getLocalSessionFactory().openSession();
Transaction transaction = session.beginTransaction();
try
{
// make sure that we don't autocommit
Connection connection = session.connection();
connection.setAutoCommit(false);
Configuration cfg = localSessionFactory.getConfiguration();
// dump the schema, if required
if (schemaOuputFilename != null)
{
File schemaOutputFile = new File(schemaOuputFilename);
dumpSchemaCreate(cfg, schemaOutputFile);
}
// update the schema, if required
if (updateSchema)
{
updateSchema(cfg, session, connection);
}
// verify that all patches have been applied correctly
checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check scripts
checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, false); // check scripts
// all done successfully
transaction.commit();
}
catch (Throwable e)
{
try { transaction.rollback(); } catch (Throwable ee) {}
if (updateSchema)
{
throw new AlfrescoRuntimeException(ERR_UPDATE_FAILED, e);
}
else
{
throw new AlfrescoRuntimeException(ERR_VALIDATION_FAILED, e);
}
}
}
@Override
protected void onShutdown(ApplicationEvent event)
{
// NOOP
}
}

View File

@@ -46,6 +46,7 @@ import org.alfresco.service.cmr.view.Location;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.AbstractLifecycleBean;
import org.alfresco.util.TempFileProvider;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -53,8 +54,6 @@ import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.util.FileCopyUtils;
/**
@@ -62,7 +61,7 @@ import org.springframework.util.FileCopyUtils;
*
* @author David Caruana
*/
public class ImporterBootstrap implements ApplicationListener
public class ImporterBootstrap extends AbstractLifecycleBean
{
// View Properties (used in setBootstrapViews)
public static final String VIEW_PATH_PROPERTY = "path";
@@ -643,16 +642,16 @@ public class ImporterBootstrap implements ApplicationListener
return true;
}
/*
* (non-Javadoc)
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
*/
public void onApplicationEvent(ApplicationEvent event)
@Override
protected void onBootstrap(ApplicationEvent event)
{
if (event instanceof ContextRefreshedEvent)
{
bootstrap();
}
bootstrap();
}
@Override
protected void onShutdown(ApplicationEvent event)
{
// NOOP
}
}
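// Hedged sketch (not part of this changeset) of the lifecycle pattern this commit
// applies to PatchExecuter, ImporterBootstrap and SystemInfoBootstrap: rather than
// implementing ApplicationListener and filtering ContextRefreshedEvent by hand, a
// bean extends AbstractLifecycleBean and overrides the two callbacks shown in this
// changeset. The bean below is illustrative only.
import org.alfresco.util.AbstractLifecycleBean;
import org.springframework.context.ApplicationEvent;

public class ExampleBootstrapBean extends AbstractLifecycleBean
{
    @Override
    protected void onBootstrap(ApplicationEvent event)
    {
        // one-off startup work goes here, e.g. applying patches or importing views
    }

    @Override
    protected void onShutdown(ApplicationEvent event)
    {
        // NOOP - nothing to release
    }
}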

View File

@@ -22,7 +22,7 @@ import java.util.List;
import org.alfresco.repo.admin.patch.PatchDaoService;
import org.alfresco.repo.domain.AppliedPatch;
import org.alfresco.repo.domain.hibernate.VersionCounterDaoComponentImpl;
import org.alfresco.repo.version.common.counter.VersionCounterService;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
@@ -37,7 +37,7 @@ public class SystemExporterImporter
// dependencies
private NodeService nodeService;
private PatchDaoService patchDao;
private VersionCounterDaoComponentImpl versionCounterDao;
private VersionCounterService versionCounterService;
public void setNodeService(NodeService nodeService)
@@ -50,9 +50,9 @@ public class SystemExporterImporter
this.patchDao = patchDaoService;
}
public void setVersionDao(VersionCounterDaoComponentImpl versionCounterDao)
public void setVersionCounterService(VersionCounterService versionCounterService)
{
this.versionCounterDao = versionCounterDao;
this.versionCounterService = versionCounterService;
}
@@ -89,7 +89,7 @@ public class SystemExporterImporter
for (StoreRef storeRef : storeRefs)
{
VersionCounterInfo versionCounterInfo = new VersionCounterInfo();
int versionCount = versionCounterDao.currentVersionNumber(storeRef);
int versionCount = versionCounterService.currentVersionNumber(storeRef);
versionCounterInfo.storeRef = storeRef.toString();
versionCounterInfo.count = versionCount;
systemInfo.versionCounters.add(versionCounterInfo);
@@ -128,7 +128,7 @@ public class SystemExporterImporter
for (VersionCounterInfo versionCounterInfo : systemInfo.versionCounters)
{
StoreRef storeRef = new StoreRef(versionCounterInfo.storeRef);
versionCounterDao.setVersionNumber(storeRef, versionCounterInfo.count);
versionCounterService.setVersionNumber(storeRef, versionCounterInfo.count);
}
}

View File

@@ -26,9 +26,8 @@ import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.view.ImporterException;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.AbstractLifecycleBean;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
/**
@@ -36,7 +35,7 @@ import org.springframework.context.event.ContextRefreshedEvent;
*
* @author davidc
*/
public class SystemInfoBootstrap implements ApplicationListener
public class SystemInfoBootstrap extends AbstractLifecycleBean
{
// dependencies
private TransactionService transactionService;
@@ -177,16 +176,16 @@ public class SystemInfoBootstrap implements ApplicationListener
return true;
}
/*
* (non-Javadoc)
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
*/
public void onApplicationEvent(ApplicationEvent event)
@Override
protected void onBootstrap(ApplicationEvent event)
{
if (event instanceof ContextRefreshedEvent)
{
bootstrap();
}
bootstrap();
}
@Override
protected void onShutdown(ApplicationEvent event)
{
// NOOP
}
}

View File

@@ -28,32 +28,32 @@ import org.alfresco.service.cmr.action.ParameterDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.namespace.QName;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.Wrapper;
/**
* Scripted Action service for describing and executing actions against Nodes.
*
*
* @author davidc
*/
public final class Actions implements Scopeable
{
/** Repository Service Registry */
private ServiceRegistry services;
/** Root scope for this object */
private Scriptable scope;
/**
* Constructor
*
* @param services repository service registry
* @param services
* repository service registry
*/
public Actions(ServiceRegistry services)
{
this.services = services;
}
/**
* @see org.alfresco.repo.jscript.Scopeable#setScope(org.mozilla.javascript.Scriptable)
*/
@@ -61,11 +61,11 @@ public final class Actions implements Scopeable
{
this.scope = scope;
}
/**
* Gets the list of registered action names
*
* @return the registered action names
* @return the registered action names
*/
public String[] getRegistered()
{
@@ -79,17 +79,18 @@ public final class Actions implements Scopeable
}
return registered;
}
public String[] jsGet_registered()
{
return getRegistered();
}
/**
* Create an Action
*
* @param actionName the action name
* @return the action
* @param actionName
* the action name
* @return the action
*/
public ScriptAction create(String actionName)
{
@@ -104,8 +105,7 @@ public final class Actions implements Scopeable
}
return scriptAction;
}
/**
* Scriptable Action
*
@@ -114,23 +114,25 @@ public final class Actions implements Scopeable
public final class ScriptAction implements Serializable, Scopeable
{
private static final long serialVersionUID = 5794161358406531996L;
/** Root scope for this object */
private Scriptable scope;
private Scriptable scope;
/** Converter with knowledge of action parameter values */
private ActionValueConverter converter;
/** Action state */
private Action action;
private ActionDefinition actionDef;
private ScriptableParameterMap<String, Serializable> parameters = null;
/**
* Construct
*
* @param action Alfresco action
* @param action
* Alfresco action
*/
public ScriptAction(Action action, ActionDefinition actionDef)
{
@@ -138,7 +140,7 @@ public final class Actions implements Scopeable
this.actionDef = actionDef;
this.converter = new ActionValueConverter();
}
/**
* @see org.alfresco.repo.jscript.Scopeable#setScope(org.mozilla.javascript.Scriptable)
*/
@@ -146,28 +148,25 @@ public final class Actions implements Scopeable
{
this.scope = scope;
}
/**
* Returns the action name
*
* @return action name
* @return action name
*/
public String getName()
{
return this.actionDef.getName();
}
public String jsGet_name()
{
return getName();
}
/**
* Return all the properties known about this node.
*
* The Map returned implements the Scriptable interface to allow access to the properties via
* JavaScript associative array access. This means properties of a node can be accessed thus:
* <code>node.properties["name"]</code>
* Return all the properties known about this node. The Map returned implements the Scriptable interface to allow access to the properties via JavaScript associative array
* access. This means properties of a node can be accessed thus: <code>node.properties["name"]</code>
*
* @return Map of properties for this Node.
*/
@@ -187,17 +186,18 @@ public final class Actions implements Scopeable
this.parameters.setModified(false);
}
return this.parameters;
}
}
public Map<String, Serializable> jsGet_parameters()
{
return getParameters();
}
/**
* Execute action
*
* @param node the node to execute action upon
* @param node
* the node to execute action upon
*/
@SuppressWarnings("synthetic-access")
public void execute(Node node)
@@ -206,7 +206,7 @@ public final class Actions implements Scopeable
{
Map<String, Serializable> actionParams = action.getParameterValues();
actionParams.clear();
for (Map.Entry<String, Serializable> entry : this.parameters.entrySet())
{
// perform the conversion from script wrapper object to repo serializable values
@@ -217,7 +217,7 @@ public final class Actions implements Scopeable
}
services.getActionService().executeAction(action, node.getNodeRef());
}
/**
* Value converter with specific knowledge of action parameters
*
@@ -227,10 +227,12 @@ public final class Actions implements Scopeable
{
/**
* Convert Action Parameter for Script usage
*
* @param paramName parameter name
* @param value value to convert
* @return converted value
*
* @param paramName
* parameter name
* @param value
* value to convert
* @return converted value
*/
@SuppressWarnings("synthetic-access")
public Serializable convertActionParamForScript(String paramName, Serializable value)
@@ -238,7 +240,7 @@ public final class Actions implements Scopeable
ParameterDefinition paramDef = actionDef.getParameterDefintion(paramName);
if (paramDef != null && paramDef.getType().equals(DataTypeDefinition.QNAME))
{
return ((QName)value).toPrefixString(services.getNamespaceService());
return ((QName) value).toPrefixString(services.getNamespaceService());
}
else
{
@@ -249,17 +251,45 @@ public final class Actions implements Scopeable
/**
* Convert Action Parameter for Java usage
*
* @param paramName parameter name
* @param value value to convert
* @return converted value
* @param paramName
* parameter name
* @param value
* value to convert
* @return converted value
*/
@SuppressWarnings("synthetic-access")
public Serializable convertActionParamForRepo(String paramName, Serializable value)
{
ParameterDefinition paramDef = actionDef.getParameterDefintion(paramName);
if (paramDef != null && paramDef.getType().equals(DataTypeDefinition.QNAME))
{
return QName.createQName((String)value, services.getNamespaceService());
if (value instanceof Wrapper)
{
// unwrap a Java object from a JavaScript wrapper
// recursively call this method to convert the unwrapped value
return convertActionParamForRepo(paramName, (Serializable) ((Wrapper) value).unwrap());
}
else
{
if (value instanceof String)
{
String stringQName = (String) value;
if (stringQName.startsWith("{"))
{
return QName.createQName(stringQName);
}
else
{
return QName.createQName(stringQName, services.getNamespaceService());
}
}
else
{
return value;
}
}
}
else
{
@@ -269,39 +299,41 @@ public final class Actions implements Scopeable
}
}
/**
* Scripted Parameter map with modified flag.
*
*
* @author davidc
*/
public static final class ScriptableParameterMap<K,V> extends ScriptableHashMap<K,V>
public static final class ScriptableParameterMap<K, V> extends ScriptableHashMap<K, V>
{
private static final long serialVersionUID = 574661815973241554L;
private boolean modified = false;
private boolean modified = false;
/**
* Is this a modified parameter map?
*
* @return true => modified
* @return true => modified
*/
/*package*/ boolean isModified()
/* package */boolean isModified()
{
return modified;
}
/**
* Set explicitly whether this map is modified
*
* @param modified true => modified, false => not modified
* @param modified
* true => modified, false => not modified
*/
/*package*/ void setModified(boolean modified)
/* package */void setModified(boolean modified)
{
this.modified = modified;
}
/* (non-Javadoc)
/*
* (non-Javadoc)
*
* @see org.mozilla.javascript.Scriptable#getClassName()
*/
@Override
@@ -310,7 +342,9 @@ public final class Actions implements Scopeable
return "ScriptableParameterMap";
}
/* (non-Javadoc)
/*
* (non-Javadoc)
*
* @see org.mozilla.javascript.Scriptable#delete(java.lang.String)
*/
@Override
@@ -320,7 +354,9 @@ public final class Actions implements Scopeable
setModified(true);
}
/* (non-Javadoc)
/*
* (non-Javadoc)
*
* @see org.mozilla.javascript.Scriptable#put(java.lang.String, org.mozilla.javascript.Scriptable, java.lang.Object)
*/
@Override
@@ -330,5 +366,5 @@ public final class Actions implements Scopeable
setModified(true);
}
}
}
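// Hedged sketch (not part of this changeset) of the QName coercion rule added to
// convertActionParamForRepo above: a fully-qualified string such as
// "{http://www.alfresco.org/model/content/1.0}name" is parsed directly, while a
// prefixed form such as "cm:name" needs the namespace service to resolve the prefix.
// The class and method names are illustrative only.
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;

public class QNameCoercionExample
{
    public static QName toQName(String value, NamespaceService namespaceService)
    {
        if (value.startsWith("{"))
        {
            // already in {uri}localName form
            return QName.createQName(value);
        }
        else
        {
            // prefixed form - resolve the prefix against the registered namespaces
            return QName.createQName(value, namespaceService);
        }
    }
}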

View File

@@ -0,0 +1,206 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.jscript;
import java.util.Collection;
import org.alfresco.model.ContentModel;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.TemplateImageResolver;
import org.alfresco.service.cmr.search.CategoryService;
import org.alfresco.service.namespace.QName;
import org.mozilla.javascript.Scriptable;
/**
* Category Nodes from the classification helper have special support.
*
* @author Andy Hind
*/
public class CategoryNode extends Node
{
/**
* Constructor
*
* @param nodeRef
* @param services
* @param resolver
*/
public CategoryNode(NodeRef nodeRef, ServiceRegistry services, TemplateImageResolver resolver)
{
super(nodeRef, services, resolver);
}
/**
* Constructor
*
* @param nodeRef
* @param services
* @param resolver
* @param scope
*/
public CategoryNode(NodeRef nodeRef, ServiceRegistry services, TemplateImageResolver resolver, Scriptable scope)
{
super(nodeRef, services, resolver, scope);
}
/**
* @return all the members of a category
*/
public Node[] getCategoryMembers()
{
return buildNodes(services.getCategoryService().getChildren(getNodeRef(), CategoryService.Mode.MEMBERS, CategoryService.Depth.ANY));
}
public Node[] jsGet_categoryMembers()
{
return getCategoryMembers();
}
/**
* @return all the subcategories of a category
*/
public CategoryNode[] getSubCategories()
{
return buildCategoryNodes(services.getCategoryService().getChildren(getNodeRef(), CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.ANY));
}
public CategoryNode[] jsGet_subCategories()
{
return getSubCategories();
}
/**
* @return members and subcategories of a category
*/
public Node[] getMembersAndSubCategories()
{
return buildMixedNodes(services.getCategoryService().getChildren(getNodeRef(), CategoryService.Mode.ALL, CategoryService.Depth.ANY));
}
public Node[] jsGet_membersAndSubCategories()
{
return getMembersAndSubCategories();
}
/**
* @return all the immediate members of a category
*/
public Node[] getImmediateCategoryMembers()
{
return buildNodes(services.getCategoryService().getChildren(getNodeRef(), CategoryService.Mode.MEMBERS, CategoryService.Depth.IMMEDIATE));
}
public Node[] jsGet_immediateCategoryMembers()
{
return getImmediateCategoryMembers();
}
/**
* @return all the immediate subcategories of a category
*/
public CategoryNode[] getImmediateSubCategories()
{
return buildCategoryNodes(services.getCategoryService().getChildren(getNodeRef(), CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.IMMEDIATE));
}
public CategoryNode[] jsGet_immediateSubCategories()
{
return getImmediateSubCategories();
}
/**
* @return immediate members and subcategories of a category
*/
public Node[] getImmediateMembersAndSubCategories()
{
return buildMixedNodes(services.getCategoryService().getChildren(getNodeRef(), CategoryService.Mode.ALL, CategoryService.Depth.IMMEDIATE));
}
public Node[] jsGet_immediateMembersAndSubCategories()
{
return getImmediateMembersAndSubCategories();
}
/**
* Create a new subcategory
*
* @param name Of the category to create
*
* @return CategoryNode
*/
public CategoryNode createSubCategory(String name)
{
return new CategoryNode(services.getCategoryService().createCategory(getNodeRef(), name), this.services, this.imageResolver, this.scope);
}
/**
* Remove this category
*/
public void removeCategory()
{
services.getCategoryService().deleteCategory(getNodeRef());
}
@Override
public boolean isCategory()
{
return true;
}
private CategoryNode[] buildCategoryNodes(Collection<ChildAssociationRef> cars)
{
CategoryNode[] categoryNodes = new CategoryNode[cars.size()];
int i = 0;
for (ChildAssociationRef car : cars)
{
categoryNodes[i++] = new CategoryNode(car.getChildRef(), this.services, this.imageResolver, this.scope);
}
return categoryNodes;
}
private Node[] buildNodes(Collection<ChildAssociationRef> cars)
{
Node[] nodes = new Node[cars.size()];
int i = 0;
for (ChildAssociationRef car : cars)
{
nodes[i++] = new Node(car.getChildRef(), this.services, this.imageResolver, this.scope);
}
return nodes;
}
private Node[] buildMixedNodes(Collection<ChildAssociationRef> cars)
{
Node[] nodes = new Node[cars.size()];
int i = 0;
for (ChildAssociationRef car : cars)
{
QName type = services.getNodeService().getType(car.getChildRef());
if (services.getDictionaryService().isSubClass(type, ContentModel.TYPE_CATEGORY))
{
nodes[i++] = new CategoryNode(car.getChildRef(), this.services, this.imageResolver, this.scope);
}
else
{
nodes[i++] = new Node(car.getChildRef(), this.services, this.imageResolver, this.scope);
}
}
return nodes;
}
}

View File

@@ -0,0 +1,202 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.jscript;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.alfresco.model.ContentModel;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.TemplateImageResolver;
import org.alfresco.service.cmr.repository.TemplateNode;
import org.alfresco.service.cmr.search.CategoryService;
import org.alfresco.service.namespace.QName;
/**
* Category Nodes from the classification helper have special support.
*/
public class CategoryTemplateNode extends TemplateNode
{
/**
* Constructor
*
* @param nodeRef
* @param services
* @param resolver
*/
public CategoryTemplateNode(NodeRef nodeRef, ServiceRegistry services, TemplateImageResolver resolver)
{
super(nodeRef, services, resolver);
}
@Override
public boolean getIsCategory()
{
return true;
}
/**
* @return all the members of a category
*/
public List<TemplateNode> getCategoryMembers()
{
if (getIsCategory())
{
return buildTemplateNodeList(services.getCategoryService().getChildren(getNodeRef(),
CategoryService.Mode.MEMBERS, CategoryService.Depth.ANY));
}
else
{
return Collections.<TemplateNode>emptyList();
}
}
/**
* @return all the subcategories of a category
*/
public List<CategoryTemplateNode> getSubCategories()
{
if (getIsCategory())
{
return buildCategoryNodeList(services.getCategoryService().getChildren(getNodeRef(),
CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.ANY));
}
else
{
return Collections.<CategoryTemplateNode>emptyList();
}
}
/**
* @return members and subcategories of a category
*/
public List<TemplateNode> getMembersAndSubCategories()
{
if (getIsCategory())
{
return buildMixedNodeList(services.getCategoryService().getChildren(getNodeRef(), CategoryService.Mode.ALL,
CategoryService.Depth.ANY));
}
else
{
return Collections.<TemplateNode>emptyList();
}
}
/**
* @return all the immediate members of a category
*/
public List<TemplateNode> getImmediateCategoryMembers()
{
if (getIsCategory())
{
return buildTemplateNodeList(services.getCategoryService().getChildren(getNodeRef(),
CategoryService.Mode.MEMBERS, CategoryService.Depth.IMMEDIATE));
}
else
{
return Collections.<TemplateNode>emptyList();
}
}
/**
* @return all the immediate subcategories of a category
*/
public List<CategoryTemplateNode> getImmediateSubCategories()
{
if (getIsCategory())
{
return buildCategoryNodeList(services.getCategoryService().getChildren(getNodeRef(),
CategoryService.Mode.SUB_CATEGORIES, CategoryService.Depth.IMMEDIATE));
}
else
{
return Collections.<CategoryTemplateNode>emptyList();
}
}
/**
* @return immediate members and subcategories of a category
*/
public List<TemplateNode> getImmediateMembersAndSubCategories()
{
if (getIsCategory())
{
return buildMixedNodeList(services.getCategoryService().getChildren(getNodeRef(),
CategoryService.Mode.ALL, CategoryService.Depth.IMMEDIATE));
}
else
{
return Collections.<TemplateNode>emptyList();
}
}
/**
* Support to build node lists from category service API calls.
*
* @param childRefs
*
* @return List of TemplateNode
*/
private List<TemplateNode> buildTemplateNodeList(Collection<ChildAssociationRef> childRefs)
{
List<TemplateNode> answer = new ArrayList<TemplateNode>(childRefs.size());
for (ChildAssociationRef ref : childRefs)
{
// create our Node representation from the NodeRef
TemplateNode child = new TemplateNode(ref.getChildRef(), this.services, this.imageResolver);
answer.add(child);
}
return answer;
}
private List<CategoryTemplateNode> buildCategoryNodeList(Collection<ChildAssociationRef> childRefs)
{
List<CategoryTemplateNode> answer = new ArrayList<CategoryTemplateNode>(childRefs.size());
for (ChildAssociationRef ref : childRefs)
{
// create our Node representation from the NodeRef
CategoryTemplateNode child = new CategoryTemplateNode(ref.getChildRef(), this.services, this.imageResolver);
answer.add(child);
}
return answer;
}
private List<TemplateNode> buildMixedNodeList(Collection<ChildAssociationRef> cars)
{
List<TemplateNode> nodes = new ArrayList<TemplateNode>(cars.size());
int i = 0;
for (ChildAssociationRef car : cars)
{
QName type = services.getNodeService().getType(car.getChildRef());
if (services.getDictionaryService().isSubClass(type, ContentModel.TYPE_CATEGORY))
{
nodes.add(new CategoryTemplateNode(car.getChildRef(), this.services, this.imageResolver));
}
else
{
nodes.add(new TemplateNode(car.getChildRef(), this.services, this.imageResolver));
}
}
return nodes;
}
}

View File

@@ -0,0 +1,144 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.jscript;
import java.util.Collection;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.TemplateImageResolver;
import org.alfresco.service.cmr.search.CategoryService;
import org.alfresco.service.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mozilla.javascript.Scriptable;
/**
* Support class for finding categories, finding root nodes for categories and creating root categories.
*
* @author Andy Hind
*/
public final class Classification implements Scopeable
{
@SuppressWarnings("unused")
private Scriptable scope;
private ServiceRegistry services;
@SuppressWarnings("unused")
private TemplateImageResolver imageResolver;
private StoreRef storeRef;
public Classification(ServiceRegistry services, StoreRef storeRef, TemplateImageResolver imageResolver)
{
this.services = services;
this.imageResolver = imageResolver;
this.storeRef = storeRef;
}
/**
* @see org.alfresco.repo.jscript.Scopeable#setScope(org.mozilla.javascript.Scriptable)
*/
public void setScope(Scriptable scope)
{
this.scope = scope;
}
/**
* Find all the category nodes in a given classification.
*
* @param aspect
* @return
*/
public CategoryNode[] getAllCategoryNodes(String aspect)
{
return buildCategoryNodes(services.getCategoryService().getCategories(storeRef, createQName(aspect),
CategoryService.Depth.ANY));
}
/**
* Get all the aspects that define a classification.
*
* @return
*/
public String[] getAllClassificationAspects()
{
Collection<QName> aspects = services.getCategoryService().getClassificationAspects();
String[] answer = new String[aspects.size()];
int i = 0;
for (QName qname : aspects)
{
answer[i++] = qname.toPrefixString(this.services.getNamespaceService());
}
return answer;
}
public String[] jsGet_allClassificationAspects()
{
return getAllClassificationAspects();
}
/**
* Create a root category in a classification.
*
* @param aspect
* @param name
*/
public void createRootCategory(String aspect, String name)
{
services.getCategoryService().createRootCategory(storeRef, createQName(aspect), name);
}
/**
* Get the root categories in a classification.
*
* @param aspect
* @return
*/
public CategoryNode[] getRootCategories(String aspect)
{
return buildCategoryNodes(services.getCategoryService().getRootCategories(storeRef, createQName(aspect)));
}
private CategoryNode[] buildCategoryNodes(Collection<ChildAssociationRef> cars)
{
CategoryNode[] categoryNodes = new CategoryNode[cars.size()];
int i = 0;
for (ChildAssociationRef car : cars)
{
categoryNodes[i++] = new CategoryNode(car.getChildRef(), this.services, this.imageResolver, this.scope);
}
return categoryNodes;
}
private QName createQName(String s)
{
QName qname;
if (s.indexOf(QName.NAMESPACE_BEGIN) != -1)
{
qname = QName.createQName(s);
}
else
{
qname = QName.createQName(s, this.services.getNamespaceService());
}
return qname;
}
}

View File

@@ -92,7 +92,7 @@ public class Node implements Serializable, Scopeable
private final static String FOLDER_BROWSE_URL = "/navigate/browse/{0}/{1}/{2}";
/** Root scope for this object */
private Scriptable scope;
protected Scriptable scope;
/** Node Value Converter */
private NodeValueConverter converter = null;
@@ -110,18 +110,17 @@ public class Node implements Serializable, Scopeable
private Node[] children = null;
/** The properties of this node */
private ScriptableQNameMap<String, Serializable> properties = null;
private ServiceRegistry services = null;
protected ServiceRegistry services = null;
private NodeService nodeService = null;
private Boolean isDocument = null;
private Boolean isContainer = null;
private String displayPath = null;
private TemplateImageResolver imageResolver = null;
protected TemplateImageResolver imageResolver = null;
private Node parent = null;
private ChildAssociationRef primaryParentAssoc = null;
// NOTE: see the reset() method when adding new cached members!
// ------------------------------------------------------------------------------
// Construction
@@ -464,6 +463,20 @@ public class Node implements Serializable, Scopeable
return isDocument();
}
/**
* @return true if the Node is a Category
*/
public boolean isCategory()
{
// this is overridden by the CategoryNode subclass to return true
return false;
}
public boolean jsGet_isCategory()
{
return isCategory();
}
/**
* @return The list of aspects applied to this node
*/
@@ -890,7 +903,8 @@ public class Node implements Serializable, Scopeable
this.services.getPermissionService().deletePermission(this.nodeRef, authority, permission);
}
// -------------
// ------------------------------------------------------------------------------
// Ownership API
/**
@@ -1145,7 +1159,7 @@ public class Node implements Serializable, Scopeable
{
if (destination != null)
{
NodeRef copyRef = this.services.getCopyService().copy(
NodeRef copyRef = this.services.getCopyService().copyAndRename(
this.nodeRef,
destination.getNodeRef(),
ContentModel.ASSOC_CONTAINS,

View File

@@ -220,25 +220,24 @@ public class RhinoScriptService implements ScriptService
// add useful util objects
model.put("actions", new Actions(services));
model.put("logger", new ScriptLogger());
model.put("utils", new ScriptUtils());
// insert supplied object model into root of the default scope
for (String key : model.keySet())
{
for (String key : model.keySet())
// set the root scope on appropriate objects
// this is used to allow native JS object creation etc.
Object obj = model.get(key);
if (obj instanceof Scopeable)
{
// set the root scope on appropriate objects
// this is used to allow native JS object creation etc.
Object obj = model.get(key);
if (obj instanceof Scopeable)
{
((Scopeable)obj).setScope(scope);
}
// convert/wrap each object to JavaScript compatible
Object jsObject = Context.javaToJS(obj, scope);
// insert into the root scope ready for access by the script
ScriptableObject.putProperty(scope, key, jsObject);
((Scopeable)obj).setScope(scope);
}
// convert/wrap each object to JavaScript compatible
Object jsObject = Context.javaToJS(obj, scope);
// insert into the root scope ready for access by the script
ScriptableObject.putProperty(scope, key, jsObject);
}
// execute the script
@@ -343,6 +342,10 @@ public class RhinoScriptService implements ScriptService
model.put("search", new Search(services, companyHome.getStoreRef(), resolver));
model.put("session", new Session(services, resolver));
model.put("classification", new Classification(services, companyHome.getStoreRef(), resolver));
return model;
}
}
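// Hedged sketch (not part of this changeset) of the Rhino pattern used above: each
// model object is wrapped with Context.javaToJS and put into the root scope so a
// script can address it by name. The model contents and the script source here are
// illustrative only.
import java.util.HashMap;
import java.util.Map;

import org.mozilla.javascript.Context;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;

public class RhinoScopeExample
{
    public static Object run()
    {
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("logger", new Object());   // stand-in for the ScriptLogger used above

        Context cx = Context.enter();
        try
        {
            Scriptable scope = cx.initStandardObjects();
            for (Map.Entry<String, Object> entry : model.entrySet())
            {
                // convert/wrap each object to be JavaScript compatible
                Object jsObject = Context.javaToJS(entry.getValue(), scope);
                // insert into the root scope ready for access by the script
                ScriptableObject.putProperty(scope, entry.getKey(), jsObject);
            }
            // the script can now reference "logger" directly
            return cx.evaluateString(scope, "logger != null;", "example.js", 1, null);
        }
        finally
        {
            Context.exit();
        }
    }
}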

View File

@@ -0,0 +1,56 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.jscript;
import org.mozilla.javascript.Scriptable;
/**
* Place for general and miscellaneous utility functions not already found in generic JavaScript.
*
* @author Kevin Roast
*/
public final class ScriptUtils implements Scopeable
{
/** Root scope for this object */
private Scriptable scope;
/**
* @see org.alfresco.repo.jscript.Scopeable#setScope(org.mozilla.javascript.Scriptable)
*/
public void setScope(Scriptable scope)
{
this.scope = scope;
}
/**
* Function to pad a string with zero '0' characters to the required length
*
* @param s String to pad with leading zero '0' characters
* @param len Length to pad to
*
* @return padded string or the original if already at >=len characters
*/
public String pad(String s, int len)
{
String result = s;
for (int i=0; i<(len - s.length()); i++)
{
result = "0" + result;
}
return result;
}
}
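// Hedged usage sketch (not part of this changeset): what the pad helper above
// produces. The wrapper class is illustrative only.
import org.alfresco.repo.jscript.ScriptUtils;

public class PadExample
{
    public static void main(String[] args)
    {
        ScriptUtils utils = new ScriptUtils();
        System.out.println(utils.pad("7", 4));     // prints "0007"
        System.out.println(utils.pad("12345", 4)); // prints "12345" - already long enough
    }
}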

View File

@@ -0,0 +1,79 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.jscript;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.TemplateImageResolver;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mozilla.javascript.Scriptable;
/**
* Support object for session level properties etc.
* <p>
* Provides access to the user's authentication ticket.
*
* @author Andy Hind
*/
public class Session implements Scopeable
{
@SuppressWarnings("unused")
private static Log logger = LogFactory.getLog(Session.class);
@SuppressWarnings("unused")
private Scriptable scope;
private ServiceRegistry services;
@SuppressWarnings("unused")
private TemplateImageResolver imageResolver;
public Session(ServiceRegistry services, TemplateImageResolver imageResolver)
{
this.services = services;
this.imageResolver = imageResolver;
}
/**
* @see org.alfresco.repo.jscript.Scopeable#setScope(org.mozilla.javascript.Scriptable)
*/
public void setScope(Scriptable scope)
{
this.scope = scope;
}
/**
* Get the user's authentication ticket.
*
* @return
*/
public String getTicket()
{
return services.getAuthenticationService().getCurrentTicket();
}
/**
* Expose the user's authentication ticket as a JavaScript property.
*
* @return
*/
public String jsGet_ticket()
{
return getTicket();
}
}

View File

@@ -503,7 +503,7 @@ public class FileFolderServiceImpl implements FileFolderService
*/
public FileInfo rename(NodeRef sourceNodeRef, String newName) throws FileExistsException, FileNotFoundException
{
return move(sourceNodeRef, null, newName);
return moveOrCopy(sourceNodeRef, null, newName, true);
}
/**
@@ -815,12 +815,25 @@ public class FileFolderServiceImpl implements FileFolderService
for (int i = 0; i < folderCount; i++)
{
String pathElement = pathElements.get(i);
FileInfo pathElementInfo = getPathElementInfo(currentPath, rootNodeRef, parentNodeRef, pathElement, true);
parentNodeRef = pathElementInfo.getNodeRef();
NodeRef folderNodeRef = searchSimple(parentNodeRef, pathElement);
if (folderNodeRef == null)
{
StringBuilder sb = new StringBuilder(128);
sb.append("Folder not found: " + currentPath);
throw new FileNotFoundException(sb.toString());
}
parentNodeRef = folderNodeRef;
}
// we have resolved the folder path - resolve the last component
String pathElement = pathElements.get(pathElements.size() - 1);
FileInfo result = getPathElementInfo(currentPath, rootNodeRef, parentNodeRef, pathElement, false);
NodeRef fileNodeRef = searchSimple(parentNodeRef, pathElement);
if (fileNodeRef == null)
{
StringBuilder sb = new StringBuilder(128);
sb.append("File not found: " + currentPath);
throw new FileNotFoundException(sb.toString());
}
FileInfo result = getFileInfo(fileNodeRef);
// found it
if (logger.isDebugEnabled())
{
@@ -831,42 +844,6 @@ public class FileFolderServiceImpl implements FileFolderService
}
return result;
}
/**
* Helper method to dig down a level for a node based on name
*/
private FileInfo getPathElementInfo(
StringBuilder currentPath,
NodeRef rootNodeRef,
NodeRef parentNodeRef,
String pathElement,
boolean folderOnly) throws FileNotFoundException
{
currentPath.append("/").append(pathElement);
boolean includeFiles = (folderOnly ? false : true);
List<FileInfo> pathElementInfos = search(parentNodeRef, pathElement, includeFiles, true, false);
// check
if (pathElementInfos.size() == 0)
{
StringBuilder sb = new StringBuilder(128);
sb.append(folderOnly ? "Folder" : "File or folder").append(" not found: \n")
.append(" root: ").append(rootNodeRef).append("\n")
.append(" path: ").append(currentPath);
throw new FileNotFoundException(sb.toString());
}
else if (pathElementInfos.size() > 1)
{
// we have detected a duplicate name - warn, but allow
StringBuilder sb = new StringBuilder(128);
sb.append("Duplicate file or folder found: \n")
.append(" root: ").append(rootNodeRef).append("\n")
.append(" path: ").append(currentPath);
logger.warn(sb);
}
FileInfo pathElementInfo = pathElementInfos.get(0);
return pathElementInfo;
}
public FileInfo getFileInfo(NodeRef nodeRef)
{

View File

@@ -236,11 +236,14 @@ public interface NodeDaoService
*/
public List<Serializable> getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition);
public Transaction getLastTxn(final StoreRef storeRef);
public int getTxnUpdateCountForStore(final StoreRef storeRef, final long txnId);
public int getTxnDeleteCountForStore(final StoreRef storeRef, final long txnId);
public Transaction getTxnById(long txnId);
public Transaction getLastTxn();
public Transaction getLastTxnForStore(final StoreRef storeRef);
public int getTxnUpdateCount(final long txnId);
public int getTxnDeleteCount(final long txnId);
public int getTransactionCount();
public List<Transaction> getNextTxns(final Transaction lastTxn, final int count);
public List<Transaction> getNextTxns(final long lastTxnId, final int count);
public List<Transaction> getNextRemoteTxns(final long lastTxnId, final int count);
public List<NodeRef> getTxnChangesForStore(final StoreRef storeRef, final long txnId);
public List<NodeRef> getTxnChanges(final long txnId);
}

View File

@@ -18,6 +18,7 @@ package org.alfresco.repo.node.db.hibernate;
import java.io.Serializable;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
@@ -97,6 +98,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private final String uuid;
private static TransactionAwareSingleton<Long> serverIdSingleton = new TransactionAwareSingleton<Long>();
private final String ipAddress;
/**
*
@@ -104,6 +106,14 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
public HibernateNodeDaoServiceImpl()
{
this.uuid = GUID.generate();
try
{
ipAddress = InetAddress.getLocalHost().getHostAddress();
}
catch (UnknownHostException e)
{
throw new AlfrescoRuntimeException("Failed to get server IP address", e);
}
}
/**
@@ -148,7 +158,6 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
try
{
final String ipAddress = InetAddress.getLocalHost().getHostAddress();
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
@@ -994,16 +1003,46 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
/*
* Queries for transactions
*/
private static final String QUERY_GET_LAST_TXN_ID = "txn.GetLastTxnId";
private static final String QUERY_GET_LAST_TXN_ID_FOR_STORE = "txn.GetLastTxnIdForStore";
private static final String QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE = "txn.GetTxnUpdateCountForStore";
private static final String QUERY_GET_TXN_DELETE_COUNT_FOR_STORE = "txn.GetTxnDeleteCountForStore";
private static final String QUERY_COUNT_TRANSACTIONS = "txn.CountTransactions";
private static final String QUERY_GET_NEXT_TXNS = "txn.GetNextTxns";
private static final String QUERY_GET_NEXT_REMOTE_TXNS = "txn.GetNextRemoteTxns";
private static final String QUERY_GET_TXN_CHANGES_FOR_STORE = "txn.GetTxnChangesForStore";
private static final String QUERY_GET_TXN_CHANGES = "txn.GetTxnChanges";
public Transaction getTxnById(long txnId)
{
return (Transaction) getSession().get(TransactionImpl.class, new Long(txnId));
}
@SuppressWarnings("unchecked")
public Transaction getLastTxn(final StoreRef storeRef)
public Transaction getLastTxn()
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_LAST_TXN_ID);
query.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
};
Long txnId = (Long) getHibernateTemplate().execute(callback);
Transaction txn = null;
if (txnId != null)
{
txn = (Transaction) getSession().get(TransactionImpl.class, txnId);
}
// done
return txn;
}
@SuppressWarnings("unchecked")
public Transaction getLastTxnForStore(final StoreRef storeRef)
{
HibernateCallback callback = new HibernateCallback()
{
@@ -1028,7 +1067,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
@SuppressWarnings("unchecked")
public int getTxnUpdateCountForStore(final StoreRef storeRef, final long txnId)
public int getTxnUpdateCount(final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
@@ -1036,9 +1075,6 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Query query = session.getNamedQuery(QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
@@ -1049,7 +1085,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
@SuppressWarnings("unchecked")
public int getTxnDeleteCountForStore(final StoreRef storeRef, final long txnId)
public int getTxnDeleteCount(final long txnId)
{
HibernateCallback callback = new HibernateCallback()
{
@@ -1057,9 +1093,6 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Query query = session.getNamedQuery(QUERY_GET_TXN_DELETE_COUNT_FOR_STORE);
query.setLong("txnId", txnId)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
.setMaxResults(1)
.setReadOnly(true);
return query.uniqueResult();
}
@@ -1088,14 +1121,12 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
@SuppressWarnings("unchecked")
public List<Transaction> getNextTxns(final Transaction lastTxn, final int count)
public List<Transaction> getNextTxns(final long lastTxnId, final int count)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
long lastTxnId = (lastTxn == null) ? -1L : lastTxn.getId();
Query query = session.getNamedQuery(QUERY_GET_NEXT_TXNS);
query.setLong("lastTxnId", lastTxnId)
.setMaxResults(count)
@@ -1108,6 +1139,26 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return results;
}
@SuppressWarnings("unchecked")
public List<Transaction> getNextRemoteTxns(final long lastTxnId, final int count)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_NEXT_REMOTE_TXNS);
query.setLong("lastTxnId", lastTxnId)
.setString("serverIpAddress", ipAddress)
.setMaxResults(count)
.setReadOnly(true);
return query.list();
}
};
List<Transaction> results = (List<Transaction>) getHibernateTemplate().execute(callback);
// done
return results;
}
@SuppressWarnings("unchecked")
public List<NodeRef> getTxnChangesForStore(final StoreRef storeRef, final long txnId)
{

View File

@@ -16,20 +16,30 @@
*/
package org.alfresco.repo.node.index;
import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import net.sf.acegisecurity.Authentication;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.Transaction;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.LuceneQueryParser;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.TransactionComponent;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.NodeRef.Status;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
@@ -224,4 +234,229 @@ public abstract class AbstractReindexComponent implements IndexRecovery
}
}
}
/**
* Gets the last indexed transaction, working back from the given transaction ID.
* This method can be used to hunt for a starting point for indexing of
* transactions not yet in the index.
*/
protected long getLastIndexedTxn(long lastTxnId)
{
// get the last transaction
long lastFoundTxnId = lastTxnId + 10L;
boolean found = false;
while (!found && lastFoundTxnId >= 0)
{
// reduce the transaction ID
lastFoundTxnId = lastFoundTxnId - 10L;
// break out as soon as we find a transaction that is in the index
found = isTxnIdPresentInIndex(lastFoundTxnId);
if (found)
{
break;
}
}
// done
if (logger.isDebugEnabled())
{
logger.debug("Found last index txn before " + lastTxnId + ": " + lastFoundTxnId);
}
return lastFoundTxnId;
}
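// Hedged sketch (not part of this changeset) of the back-off search above: starting
// from a known transaction id, step downwards in blocks of ten until a probe reports
// a transaction as present in the index. The IndexProbe interface is illustrative;
// the component itself uses isTxnIdPresentInIndex as the probe.
public class LastIndexedTxnExample
{
    public interface IndexProbe
    {
        boolean isInIndex(long txnId);
    }

    public static long findLastIndexed(long lastTxnId, IndexProbe probe)
    {
        long candidate = lastTxnId + 10L;
        while (candidate >= 0)
        {
            candidate -= 10L;
            if (probe.isInIndex(candidate))
            {
                break;
            }
        }
        return candidate;
    }
}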
protected boolean isTxnIdPresentInIndex(long txnId)
{
if (logger.isDebugEnabled())
{
logger.debug("Checking for transaction in index: " + txnId);
}
Transaction txn = nodeDaoService.getTxnById(txnId);
if (txn == null)
{
return true;
}
// count the changes in the transaction
int updateCount = nodeDaoService.getTxnUpdateCount(txnId);
int deleteCount = nodeDaoService.getTxnDeleteCount(txnId);
if (logger.isDebugEnabled())
{
logger.debug("Transaction has " + updateCount + " updates and " + deleteCount + " deletes: " + txnId);
}
// get the stores
boolean found = false;
List<StoreRef> storeRefs = nodeService.getStores();
for (StoreRef storeRef : storeRefs)
{
boolean inStore = isTxnIdPresentInIndex(storeRef, txn, updateCount, deleteCount);
if (inStore)
{
// found in a particular store
found = true;
break;
}
}
// done
if (logger.isDebugEnabled())
{
logger.debug("Transaction " + txnId + " was " + (found ? "found" : "not found") + " in indexes.");
}
return found;
}
/**
* @return Returns true if the given transaction is present in the index of the given store
*/
private boolean isTxnIdPresentInIndex(StoreRef storeRef, Transaction txn, int updateCount, int deleteCount)
{
long txnId = txn.getId();
String changeTxnId = txn.getChangeTxnId();
// check the updates first, as they are the most common case
if (updateCount > 0)
{
ResultSet results = null;
try
{
SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index, sorting with youngest first, fetching only 1
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TX:" + LuceneQueryParser.escape(changeTxnId));
sp.setLimit(1);
results = searcher.query(sp);
if (results.length() > 0)
{
if (logger.isDebugEnabled())
{
logger.debug("Index has results for txn (OK): " + txnId);
}
return true; // there were updates/creates and results for the txn were found
}
else
{
if (logger.isDebugEnabled())
{
logger.debug("Index has no results for txn (Index out of date): " + txnId);
}
return false;
}
}
finally
{
if (results != null) { results.close(); }
}
}
// there have been deletes, so we have to ensure that none of the nodes deleted are present in the index
// get all node refs for the transaction
List<NodeRef> nodeRefs = nodeDaoService.getTxnChangesForStore(storeRef, txnId);
for (NodeRef nodeRef : nodeRefs)
{
if (logger.isDebugEnabled())
{
logger.debug("Searching for node in index: \n" +
" node: " + nodeRef + "\n" +
" txn: " + txnId);
}
// we know that these are all deletions
ResultSet results = null;
try
{
SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index, sorting with youngest first, fetching only 1
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("ID:" + LuceneQueryParser.escape(nodeRef.toString()));
sp.setLimit(1);
results = searcher.query(sp);
if (results.length() == 0)
{
// no results, as expected
if (logger.isDebugEnabled())
{
logger.debug(" --> Node not found (OK)");
}
continue;
}
else
{
if (logger.isDebugEnabled())
{
logger.debug(" --> Node found (Index out of date)");
}
return false;
}
}
finally
{
if (results != null) { results.close(); }
}
}
// all tests passed
if (logger.isDebugEnabled())
{
logger.debug("Index is in synch with transaction: " + txnId);
}
return true;
}
/**
* Perform a full reindexing of the given transaction in the context of a completely
* new transaction.
*
* @param txnId the transaction identifier
*/
protected void reindexTransaction(final long txnId)
{
if (logger.isDebugEnabled())
{
logger.debug("Reindexing transaction: " + txnId);
}
TransactionWork<Object> reindexWork = new TransactionWork<Object>()
{
public Object doWork() throws Exception
{
// get the node references pertinent to the transaction
List<NodeRef> nodeRefs = nodeDaoService.getTxnChanges(txnId);
// reindex each node
for (NodeRef nodeRef : nodeRefs)
{
Status nodeStatus = nodeService.getNodeStatus(nodeRef);
if (nodeStatus == null)
{
// it's not there any more
continue;
}
if (nodeStatus.isDeleted()) // node deleted
{
// only the child node ref is relevant
ChildAssociationRef assocRef = new ChildAssociationRef(
ContentModel.ASSOC_CHILDREN,
null,
null,
nodeRef);
indexer.deleteNode(assocRef);
}
else // node created
{
// get the primary assoc for the node
ChildAssociationRef primaryAssocRef = nodeService.getPrimaryParent(nodeRef);
// reindex
indexer.createNode(primaryAssocRef);
}
}
// done
return null;
}
};
TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork, true);
// done
}
}

View File

@@ -21,27 +21,25 @@ import java.util.List;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.Transaction;
import org.alfresco.repo.search.impl.lucene.LuceneQueryParser;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.NodeRef.Status;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Component to check and recover the indexes.
* Component to check and recover the indexes. By default, the server is
* put into read-only mode during the reindex process in order to prevent metadata changes.
* This is not critical and can be {@link #setLockServer(boolean) switched off} if the
* server is required immediately.
*
* @author Derek Hulley
*/
public class FullIndexRecoveryComponent extends AbstractReindexComponent
{
private static final String ERR_STORE_NOT_UP_TO_DATE = "index.recovery.store_not_up_to_date";
private static final String ERR_INDEX_OUT_OF_DATE = "index.recovery.out_of_date";
private static final String MSG_RECOVERY_STARTING = "index.recovery.starting";
private static final String MSG_RECOVERY_COMPLETE = "index.recovery.complete";
private static final String MSG_RECOVERY_PROGRESS = "index.recovery.progress";
@@ -51,17 +49,25 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
public static enum RecoveryMode
{
/** Do nothing - not even a check */
/** Do nothing - not even a check. */
NONE,
/** Perform a quick check on the state of the indexes only */
/**
* Perform a quick check on the state of the indexes only.
*/
VALIDATE,
/** Performs a quick validation and then starts a full pass-through on failure */
/**
* Performs a validation and starts a quick recovery, if necessary.
*/
AUTO,
/** Performs a full pass-through of all recorded transactions to ensure that the indexes are up to date */
/**
* Performs a full pass-through of all recorded transactions to ensure that the indexes
* are up to date.
*/
FULL;
}
private RecoveryMode recoveryMode;
private boolean lockServer;
public FullIndexRecoveryComponent()
{
@@ -69,7 +75,8 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
}
/**
* Set the type of recovery to perform.
* Set the type of recovery to perform. Default is {@link RecoveryMode#VALIDATE to validate}
* the indexes only.
*
* @param recoveryMode one of the {@link RecoveryMode } values
*/
@@ -77,7 +84,18 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
{
this.recoveryMode = RecoveryMode.valueOf(recoveryMode);
}
/**
* Set this to <tt>true</tt> to put the server into read-only mode for the duration of the index recovery.
* The default is <tt>true</tt>, i.e. the server will be locked against further updates.
*
* @param lockServer true to force the server to be read-only
*/
public void setLockServer(boolean lockServer)
{
this.lockServer = lockServer;
}
@Override
protected void reindexImpl()
{
@@ -99,25 +117,22 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
}
else // validate first
{
List<StoreRef> storeRefs = nodeService.getStores();
for (StoreRef storeRef : storeRefs)
Transaction txn = nodeDaoService.getLastTxn();
if (txn == null)
{
// get the last txn ID in the database
Transaction txn = nodeDaoService.getLastTxn(storeRef);
boolean lastChangeTxnIdInIndex = isTxnIdPresentInIndex(storeRef, txn);
if (lastChangeTxnIdInIndex)
{
// this store is good
continue;
}
// this store isn't up to date
String msg = I18NUtil.getMessage(ERR_STORE_NOT_UP_TO_DATE, storeRef);
// no transactions - just bug out
return;
}
long txnId = txn.getId();
boolean txnInIndex = isTxnIdPresentInIndex(txnId);
if (!txnInIndex)
{
String msg = I18NUtil.getMessage(ERR_INDEX_OUT_OF_DATE);
logger.warn(msg);
// the store is out of date - validation failed
// this store isn't up to date
if (recoveryMode == RecoveryMode.VALIDATE)
{
// next store
continue;
// the store is out of date - validation failed
}
else if (recoveryMode == RecoveryMode.AUTO)
{
@@ -130,8 +145,11 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
boolean allowWrite = !transactionService.isReadOnly();
try
{
// set the server into read-only mode
transactionService.setAllowWrite(false);
if (lockServer)
{
// set the server into read-only mode
transactionService.setAllowWrite(false);
}
// do we need to perform a full recovery
if (fullRecoveryRequired)
@@ -160,8 +178,9 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
Transaction lastTxn = null;
while(true)
{
long lastTxnId = (lastTxn == null) ? -1L : lastTxn.getId().longValue();
List<Transaction> nextTxns = nodeDaoService.getNextTxns(
lastTxn,
lastTxnId,
MAX_TRANSACTIONS_PER_ITERATION);
// reindex each transaction
@@ -256,125 +275,4 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork, true);
// done
}
private boolean isTxnIdPresentInIndex(StoreRef storeRef, Transaction txn)
{
if (logger.isDebugEnabled())
{
logger.debug("Checking for transaction in index: \n" +
" store: " + storeRef + "\n" +
" txn: " + txn);
}
String changeTxnId = txn.getChangeTxnId();
// count the changes in the transaction
int updateCount = nodeDaoService.getTxnUpdateCountForStore(storeRef, txn.getId());
int deleteCount = nodeDaoService.getTxnDeleteCountForStore(storeRef, txn.getId());
if (logger.isDebugEnabled())
{
logger.debug("Transaction has " + updateCount + " updates and " + deleteCount + " deletes: " + txn);
}
// do the most update check, which is most common
if (deleteCount == 0 && updateCount == 0)
{
if (logger.isDebugEnabled())
{
logger.debug("No changes in transaction: " + txn);
}
// there's nothing to check for
return true;
}
else if (updateCount > 0)
{
ResultSet results = null;
try
{
SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index, sorting with youngest first, fetching only 1
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("TX:" + LuceneQueryParser.escape(changeTxnId));
sp.setLimit(1);
results = searcher.query(sp);
if (results.length() > 0)
{
if (logger.isDebugEnabled())
{
logger.debug("Index has results for txn (OK): " + txn);
}
return true; // there were updates/creates and results for the txn were found
}
else
{
if (logger.isDebugEnabled())
{
logger.debug("Index has no results for txn (Index out of date): " + txn);
}
return false;
}
}
finally
{
if (results != null) { results.close(); }
}
}
// there have been deletes, so we have to ensure that none of the nodes deleted are present in the index
// get all node refs for the transaction
Long txnId = txn.getId();
List<NodeRef> nodeRefs = nodeDaoService.getTxnChangesForStore(storeRef, txnId);
for (NodeRef nodeRef : nodeRefs)
{
if (logger.isDebugEnabled())
{
logger.debug("Searching for node in index: \n" +
" node: " + nodeRef + "\n" +
" txn: " + txn);
}
// we know that these are all deletions
ResultSet results = null;
try
{
SearchParameters sp = new SearchParameters();
sp.addStore(storeRef);
// search for it in the index, sorting with youngest first, fetching only 1
sp.setLanguage(SearchService.LANGUAGE_LUCENE);
sp.setQuery("ID:" + LuceneQueryParser.escape(nodeRef.toString()));
sp.setLimit(1);
results = searcher.query(sp);
if (results.length() == 0)
{
// no results, as expected
if (logger.isDebugEnabled())
{
logger.debug(" --> Node not found (OK)");
}
continue;
}
else
{
if (logger.isDebugEnabled())
{
logger.debug(" --> Node found (Index out of date)");
}
return false;
}
}
finally
{
if (results != null) { results.close(); }
}
}
// all tests passed
if (logger.isDebugEnabled())
{
logger.debug("Index is in synch with transaction: " + txn);
}
return true;
}
}
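
For orientation, the sketch below shows how the recovery component described above could be wired and triggered from Java; it is illustrative only and not part of this changeset. The setter names come from this file and from the AbstractReindexComponent collaborators exercised in IndexRemoteTransactionTrackerTest further down; the package of FullIndexRecoveryComponent and the use of reindex() as the trigger are assumptions based on that test.

import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.node.index.FullIndexRecoveryComponent;   // package assumed to match the tracker below
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.TransactionComponent;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.search.SearchService;

public class IndexRecoveryWiringSketch
{
    public FullIndexRecoveryComponent wire(
            NodeService nodeService,
            SearchService searchService,
            Indexer indexer,
            FullTextSearchIndexer ftsIndexer,
            NodeDaoService nodeDaoService,
            TransactionComponent transactionComponent,
            AuthenticationComponent authenticationComponent)
    {
        FullIndexRecoveryComponent recovery = new FullIndexRecoveryComponent();
        // behaviour described in the class and setter javadoc above
        recovery.setRecoveryMode("AUTO");        // NONE | VALIDATE | AUTO | FULL
        recovery.setLockServer(true);            // keep the server read-only while recovering
        // collaborators inherited from AbstractReindexComponent
        recovery.setNodeService(nodeService);
        recovery.setSearcher(searchService);
        recovery.setIndexer(indexer);
        recovery.setFtsIndexer(ftsIndexer);
        recovery.setNodeDaoService(nodeDaoService);
        recovery.setTransactionComponent(transactionComponent);
        recovery.setAuthenticationComponent(authenticationComponent);
        recovery.reindex();                      // validate and, if required, rebuild the indexes
        return recovery;
    }
}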

View File

@@ -0,0 +1,119 @@
/*
* Copyright (C) 2005-2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import java.util.List;
import org.alfresco.repo.domain.Transaction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Component to check and recover the indexes.
*
* @author Derek Hulley
*/
public class IndexRemoteTransactionTracker extends AbstractReindexComponent
{
private static Log logger = LogFactory.getLog(IndexRemoteTransactionTracker.class);
private boolean remoteOnly;
private long currentTxnId;
public IndexRemoteTransactionTracker()
{
remoteOnly = true;
currentTxnId = -1L;
}
/**
* Set whether or not this component should only track remote transactions.
* By default, it is <tt>true</tt>, but under certain test conditions, it may
* be desirable to track local transactions too; e.g. during testing of clustering
* when running multiple instances on the same machine.
*
* @param remoteOnly <tt>true</tt> to reindex only those transactions that were
* committed to the database by a remote server.
*/
public void setRemoteOnly(boolean remoteOnly)
{
this.remoteOnly = remoteOnly;
}
@Override
protected void reindexImpl()
{
if (currentTxnId < 0)
{
// initialize the starting point
Transaction lastTxn = nodeDaoService.getLastTxn();
if (lastTxn == null)
{
// there is nothing to do
return;
}
long lastTxnId = lastTxn.getId();
currentTxnId = getLastIndexedTxn(lastTxnId);
}
if (logger.isDebugEnabled())
{
logger.debug("Performing index tracking from txn " + currentTxnId);
}
while (true)
{
// get next transactions to index
List<Transaction> txns = getNextTransactions(currentTxnId);
if (txns.size() == 0)
{
// we've caught up
break;
}
// break out if the VM is shutting down
if (isShuttingDown())
{
break;
}
// reindex all "foreign" or "local" transactions, one at a time
for (Transaction txn : txns)
{
long txnId = txn.getId();
reindexTransaction(txnId);
currentTxnId = txnId;
}
}
}
private static final int MAX_TXN_COUNT = 1000;
private List<Transaction> getNextTransactions(long currentTxnId)
{
List<Transaction> txns = null;
if (remoteOnly)
{
txns = nodeDaoService.getNextRemoteTxns(currentTxnId, MAX_TXN_COUNT);
}
else
{
txns = nodeDaoService.getNextTxns(currentTxnId, MAX_TXN_COUNT);
}
// done
return txns;
}
}
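
As a usage sketch (illustrative only, not part of this changeset): the tracker is meant to be triggered repeatedly, with each reindex() call catching up from the last indexed transaction. How it is scheduled is not shown in this diff, so the plain java.util.Timer below is an assumption; wiring of the other collaborators follows the test case that comes next.

import java.util.Timer;
import java.util.TimerTask;

import org.alfresco.repo.node.index.IndexRemoteTransactionTracker;

public class IndexTrackingSketch
{
    public Timer schedule(final IndexRemoteTransactionTracker indexTracker)
    {
        Timer timer = new Timer("index-tracking", true);
        timer.schedule(new TimerTask()
        {
            public void run()
            {
                // each call catches up from the last indexed transaction; only transactions
                // committed by remote servers are reindexed unless setRemoteOnly(false) was used
                indexTracker.reindex();
            }
        }, 0L, 10000L);
        return timer;
    }
}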

View File

@@ -0,0 +1,125 @@
/*
* Copyright (C) 2005-2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.node.index;
import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.TransactionComponent;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
/**
* @see org.alfresco.repo.node.index.IndexRemoteTransactionTracker
*
* @author Derek Hulley
*/
@SuppressWarnings("unused")
public class IndexRemoteTransactionTrackerTest extends TestCase
{
private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
private AuthenticationComponent authenticationComponent;
private SearchService searchService;
private NodeService nodeService;
private FileFolderService fileFolderService;
private ContentStore contentStore;
private FullTextSearchIndexer ftsIndexer;
private Indexer indexer;
private NodeRef rootNodeRef;
private IndexRemoteTransactionTracker indexTracker;
public void setUp() throws Exception
{
ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
searchService = serviceRegistry.getSearchService();
nodeService = serviceRegistry.getNodeService();
fileFolderService = serviceRegistry.getFileFolderService();
authenticationComponent = (AuthenticationComponent) ctx.getBean("authenticationComponentImpl");
contentStore = (ContentStore) ctx.getBean("fileContentStore");
ftsIndexer = (FullTextSearchIndexer) ctx.getBean("LuceneFullTextSearchIndexer");
indexer = (Indexer) ctx.getBean("indexerComponent");
NodeDaoService nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
TransactionService transactionService = serviceRegistry.getTransactionService();
indexTracker = new IndexRemoteTransactionTracker();
indexTracker.setAuthenticationComponent(authenticationComponent);
indexTracker.setFtsIndexer(ftsIndexer);
indexTracker.setIndexer(indexer);
indexTracker.setNodeDaoService(nodeDaoService);
indexTracker.setNodeService(nodeService);
indexTracker.setSearcher(searchService);
indexTracker.setTransactionComponent((TransactionComponent)transactionService);
// authenticate
authenticationComponent.setSystemUserAsCurrentUser();
// create a node and remove it from the index, simulating an unindexed (remote) commit
TransactionWork<ChildAssociationRef> createNodeWork = new TransactionWork<ChildAssociationRef>()
{
public ChildAssociationRef doWork() throws Exception
{
StoreRef storeRef = new StoreRef("test", getName() + "-" + System.currentTimeMillis());
NodeRef rootNodeRef = null;
if (!nodeService.exists(storeRef))
{
nodeService.createStore(storeRef.getProtocol(), storeRef.getIdentifier());
}
rootNodeRef = nodeService.getRootNode(storeRef);
// create another node
ChildAssociationRef childAssocRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName(NamespaceService.ALFRESCO_URI, "xyz"),
ContentModel.TYPE_FOLDER);
// remove the node from the index
indexer.deleteNode(childAssocRef);
return childAssocRef;
}
};
ChildAssociationRef childAssocRef = TransactionUtil.executeInUserTransaction(transactionService, createNodeWork);
}
public void testSetup() throws Exception
{
}
public synchronized void testStartup() throws Exception
{
indexTracker.reindex();
indexTracker.reindex();
}
}

View File

@@ -118,7 +118,8 @@ public class BaseRuleTest extends BaseSpringTest
this.transactionService = (TransactionService)this.applicationContext.getBean("transactionComponent");
this.authenticationComponent = (AuthenticationComponent)this.applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();
//authenticationComponent.setSystemUserAsCurrentUser();
authenticationComponent.setCurrentUser("admin");
// Get the rule type
this.ruleType = this.ruleService.getRuleType(RULE_TYPE_NAME);

View File

@@ -149,7 +149,8 @@ public class RuleServiceCoverageTest extends TestCase
this.authenticationComponent = (AuthenticationComponent)applicationContext.getBean("authenticationComponent");
//authenticationComponent.setCurrentUser(authenticationComponent.getSystemUserName());
authenticationComponent.setSystemUserAsCurrentUser();
//authenticationComponent.setSystemUserAsCurrentUser();
authenticationComponent.setCurrentUser("admin");
this.testStoreRef = this.nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, "Test_" + System.currentTimeMillis());
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);

View File

@@ -40,6 +40,8 @@ import org.alfresco.service.cmr.rule.Rule;
import org.alfresco.service.cmr.rule.RuleService;
import org.alfresco.service.cmr.rule.RuleServiceException;
import org.alfresco.service.cmr.rule.RuleType;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.alfresco.util.GUID;
@@ -92,6 +94,11 @@ public class RuleServiceImpl implements RuleService, RuntimeRuleService
*/
private DictionaryService dictionaryService;
/**
* The permission service
*/
private PermissionService permissionService;
/**
* The action service implementation which we need for some things.
*/
@@ -179,6 +186,16 @@ public class RuleServiceImpl implements RuleService, RuntimeRuleService
this.dictionaryService = dictionaryService;
}
/**
* Set the permission service
*
* @param permissionService the permission service
*/
public void setPermissionService(PermissionService permissionService)
{
this.permissionService = permissionService;
}
/**
* Set the global rules disabled flag
*
@@ -572,49 +589,56 @@ public class RuleServiceImpl implements RuleService, RuntimeRuleService
*/
public void saveRule(NodeRef nodeRef, Rule rule)
{
disableRules();
try
{
if (this.nodeService.exists(nodeRef) == false)
{
throw new RuleServiceException("The node does not exist.");
}
NodeRef ruleNodeRef = rule.getNodeRef();
if (ruleNodeRef == null)
{
if (this.nodeService.hasAspect(nodeRef, RuleModel.ASPECT_RULES) == false)
{
// Add the actionable aspect
this.nodeService.addAspect(nodeRef, RuleModel.ASPECT_RULES, null);
}
// Create the action node
ruleNodeRef = this.nodeService.createNode(
getSavedRuleFolderRef(nodeRef),
ContentModel.ASSOC_CONTAINS,
QName.createQName(RuleModel.RULE_MODEL_URI, ASSOC_NAME_RULES_PREFIX + GUID.generate()),
RuleModel.TYPE_RULE).getChildRef();
// Set the rule node reference and the owning node reference
rule.setNodeRef(ruleNodeRef);
}
// Update the properties of the rule
this.nodeService.setProperty(ruleNodeRef, ContentModel.PROP_TITLE, rule.getTitle());
this.nodeService.setProperty(ruleNodeRef, ContentModel.PROP_DESCRIPTION, rule.getDescription());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_RULE_TYPE, (Serializable)rule.getRuleTypes());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_APPLY_TO_CHILDREN, rule.isAppliedToChildren());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_EXECUTE_ASYNC, rule.getExecuteAsynchronously());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_DISABLED, rule.getRuleDisabled());
// Save the rule's action
saveAction(ruleNodeRef, rule);
}
finally
{
enableRules();
}
if (this.permissionService.hasPermission(nodeRef, PermissionService.CHANGE_PERMISSIONS) == AccessStatus.ALLOWED)
{
disableRules();
try
{
if (this.nodeService.exists(nodeRef) == false)
{
throw new RuleServiceException("The node does not exist.");
}
NodeRef ruleNodeRef = rule.getNodeRef();
if (ruleNodeRef == null)
{
if (this.nodeService.hasAspect(nodeRef, RuleModel.ASPECT_RULES) == false)
{
// Add the actionable aspect
this.nodeService.addAspect(nodeRef, RuleModel.ASPECT_RULES, null);
}
// Create the action node
ruleNodeRef = this.nodeService.createNode(
getSavedRuleFolderRef(nodeRef),
ContentModel.ASSOC_CONTAINS,
QName.createQName(RuleModel.RULE_MODEL_URI, ASSOC_NAME_RULES_PREFIX + GUID.generate()),
RuleModel.TYPE_RULE).getChildRef();
// Set the rule node reference and the owning node reference
rule.setNodeRef(ruleNodeRef);
}
// Update the properties of the rule
this.nodeService.setProperty(ruleNodeRef, ContentModel.PROP_TITLE, rule.getTitle());
this.nodeService.setProperty(ruleNodeRef, ContentModel.PROP_DESCRIPTION, rule.getDescription());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_RULE_TYPE, (Serializable)rule.getRuleTypes());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_APPLY_TO_CHILDREN, rule.isAppliedToChildren());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_EXECUTE_ASYNC, rule.getExecuteAsynchronously());
this.nodeService.setProperty(ruleNodeRef, RuleModel.PROP_DISABLED, rule.getRuleDisabled());
// Save the rule's action
saveAction(ruleNodeRef, rule);
}
finally
{
enableRules();
}
}
else
{
throw new RuleServiceException("Insufficient permissions to save a rule.");
}
}
/**
@@ -667,22 +691,29 @@ public class RuleServiceImpl implements RuleService, RuntimeRuleService
*/
public void removeRule(NodeRef nodeRef, Rule rule)
{
if (this.nodeService.exists(nodeRef) == true &&
this.nodeService.hasAspect(nodeRef, RuleModel.ASPECT_RULES) == true)
if (this.permissionService.hasPermission(nodeRef, PermissionService.CHANGE_PERMISSIONS) == AccessStatus.ALLOWED)
{
disableRules(nodeRef);
try
{
NodeRef ruleNodeRef = rule.getNodeRef();
if (ruleNodeRef != null)
{
this.nodeService.removeChild(getSavedRuleFolderRef(nodeRef), ruleNodeRef);
}
}
finally
{
enableRules(nodeRef);
}
if (this.nodeService.exists(nodeRef) == true &&
this.nodeService.hasAspect(nodeRef, RuleModel.ASPECT_RULES) == true)
{
disableRules(nodeRef);
try
{
NodeRef ruleNodeRef = rule.getNodeRef();
if (ruleNodeRef != null)
{
this.nodeService.removeChild(getSavedRuleFolderRef(nodeRef), ruleNodeRef);
}
}
finally
{
enableRules(nodeRef);
}
}
}
else
{
throw new RuleServiceException("Insufficient permissions to remove a rule.");
}
}
@@ -691,20 +722,27 @@ public class RuleServiceImpl implements RuleService, RuntimeRuleService
*/
public void removeAllRules(NodeRef nodeRef)
{
if (this.nodeService.exists(nodeRef) == true &&
this.nodeService.hasAspect(nodeRef, RuleModel.ASPECT_RULES) == true)
if (this.permissionService.hasPermission(nodeRef, PermissionService.CHANGE_PERMISSIONS) == AccessStatus.ALLOWED)
{
NodeRef folder = getSavedRuleFolderRef(nodeRef);
if (folder != null)
{
List<ChildAssociationRef> ruleChildAssocs = this.nodeService.getChildAssocs(
folder,
RegexQNamePattern.MATCH_ALL, ASSOC_NAME_RULES_REGEX);
for (ChildAssociationRef ruleChildAssoc : ruleChildAssocs)
{
this.nodeService.removeChild(folder, ruleChildAssoc.getChildRef());
}
}
if (this.nodeService.exists(nodeRef) == true &&
this.nodeService.hasAspect(nodeRef, RuleModel.ASPECT_RULES) == true)
{
NodeRef folder = getSavedRuleFolderRef(nodeRef);
if (folder != null)
{
List<ChildAssociationRef> ruleChildAssocs = this.nodeService.getChildAssocs(
folder,
RegexQNamePattern.MATCH_ALL, ASSOC_NAME_RULES_REGEX);
for (ChildAssociationRef ruleChildAssoc : ruleChildAssocs)
{
this.nodeService.removeChild(folder, ruleChildAssoc.getChildRef());
}
}
}
}
else
{
throw new RuleServiceException("Insufficient permissions to remove a rule.");
}
}

View File

@@ -27,6 +27,8 @@ import org.alfresco.repo.action.evaluator.ComparePropertyValueEvaluator;
import org.alfresco.repo.action.executer.ImageTransformActionExecuter;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.transaction.TransactionUtil;
import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
import org.alfresco.service.cmr.action.Action;
import org.alfresco.service.cmr.action.ActionCondition;
import org.alfresco.service.cmr.repository.ContentWriter;
@@ -34,7 +36,10 @@ import org.alfresco.service.cmr.repository.CyclicChildRelationshipException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.rule.Rule;
import org.alfresco.service.cmr.rule.RuleType;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.apache.commons.digester.SetRootRule;
/**
@@ -44,7 +49,17 @@ import org.alfresco.service.namespace.QName;
*/
public class RuleServiceImplTest extends BaseRuleTest
{
AuthenticationService authenticationService;
PermissionService permissionService;
@Override
protected void onSetUpInTransaction() throws Exception
{
super.onSetUpInTransaction();
this.permissionService = (PermissionService)this.applicationContext.getBean("permissionService");
this.authenticationService = (AuthenticationService)this.applicationContext.getBean("authenticationService");
}
/**
* Test get rule type
*/
@@ -296,6 +311,59 @@ public class RuleServiceImplTest extends BaseRuleTest
ContentModel.TYPE_CONTAINER).getChildRef();
}
public void testRuleServicePermissionsConsumer()
{
this.authenticationService.createAuthentication("conUser", "password".toCharArray());
this.permissionService.setPermission(this.nodeRef, "conUser", PermissionService.CONSUMER, true);
this.permissionService.setInheritParentPermissions(this.nodeRef, true);
this.authenticationService.authenticate("conUser", "password".toCharArray());
Rule rule = createTestRule();
try
{
this.ruleService.saveRule(this.nodeRef, rule);
// Fail
fail("Consumers cannot create rules.");
}
catch (Exception exception)
{
// Ok
}
}
public void testRuleServicePermissionsEditor()
{
this.authenticationService.createAuthentication("editorUser", "password".toCharArray());
this.permissionService.setPermission(this.nodeRef, "editorUser", PermissionService.EDITOR, true);
this.permissionService.setInheritParentPermissions(this.nodeRef, true);
this.authenticationService.authenticate("editorUser", "password".toCharArray());
Rule rule = createTestRule();
try
{
this.ruleService.saveRule(this.nodeRef, rule);
// Fail
fail("Editors cannot create rules.");
}
catch (Exception exception)
{
// Ok
}
}
public void testRuleServicePermissionsCoordinator()
{
this.authenticationService.createAuthentication("coordUser", "password".toCharArray());
this.permissionService.setPermission(this.nodeRef, "coordUser", PermissionService.COORDINATOR, true);
this.permissionService.setInheritParentPermissions(this.nodeRef, true);
this.authenticationService.authenticate("admin", "admin".toCharArray());
Rule rule2 = createTestRule();
this.ruleService.saveRule(this.nodeRef, rule2);
this.authenticationService.clearCurrentSecurityContext();
}
/**
* Tests the rule inheritance within the store, checking that the cache is reset correctly when
* rules are added and removed.

View File

@@ -49,6 +49,9 @@ import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ISO9075;
import org.alfresco.util.SearchLanguageConversion;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
@@ -230,7 +233,7 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
switch (sd.getSortType())
{
case FIELD:
if (searcher.getReader().getFieldNames().contains(sd.getField()))
if (fieldHasTerm(searcher.getReader(), sd.getField()))
{
fields[index++] = new SortField(sd.getField(), !sd.isAscending());
}
@@ -308,6 +311,35 @@ public class LuceneSearcherImpl2 extends LuceneBase2 implements LuceneSearcher2
}
}
private static boolean fieldHasTerm(IndexReader indexReader, String field)
{
try
{
TermEnum termEnum = indexReader.terms(new Term(field, ""));
try
{
if (termEnum.next())
{
Term first = termEnum.term();
return first.field().equals(field);
}
else
{
return false;
}
}
finally
{
termEnum.close();
}
}
catch (IOException e)
{
throw new SearcherException("Could not find terms for sort field ", e);
}
}
public ResultSet query(StoreRef store, String language, String query)
{
return query(store, language, query, null, null);

View File

@@ -16,6 +16,8 @@
*/
package org.alfresco.repo.security.permissions.dynamic;
import java.io.Serializable;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.security.permissions.DynamicAuthority;
import org.alfresco.service.cmr.lock.LockService;
@@ -44,9 +46,13 @@ public class LockOwnerDynamicAuthority implements DynamicAuthority, Initializing
}
if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_WORKING_COPY))
{
NodeRef original = DefaultTypeConverter.INSTANCE.convert(
NodeRef.class, nodeService.getProperty(nodeRef, ContentModel.PROP_COPY_REFERENCE));
if (nodeService.exists(original))
NodeRef original = null;
Serializable reference = nodeService.getProperty(nodeRef, ContentModel.PROP_COPY_REFERENCE);
if (reference != null)
{
original = DefaultTypeConverter.INSTANCE.convert(NodeRef.class, reference);
}
if (original != null && nodeService.exists(original))
{
return (lockService.getLockStatus(original) == LockStatus.LOCK_OWNER);
}

View File

@@ -44,7 +44,6 @@ import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
/**
*
* @author andyh
*/
@@ -153,8 +152,8 @@ public class ACLEntryVoter implements AccessDecisionVoter, InitializingBean
if ((attribute.getAttribute() != null)
&& (attribute.getAttribute().startsWith(ACL_NODE)
|| attribute.getAttribute().startsWith(ACL_PARENT)
|| attribute.getAttribute().startsWith(ACL_ALLOW)
|| attribute.getAttribute().startsWith(ACL_METHOD)))
|| attribute.getAttribute().startsWith(ACL_ALLOW) || attribute.getAttribute().startsWith(
ACL_METHOD)))
{
return true;
}
@@ -165,14 +164,11 @@ public class ACLEntryVoter implements AccessDecisionVoter, InitializingBean
}
/**
* This implementation supports only <code>MethodSecurityInterceptor</code>,
* because it queries the presented <code>MethodInvocation</code>.
* This implementation supports only <code>MethodSecurityInterceptor</code>, because it queries the presented <code>MethodInvocation</code>.
*
* @param clazz
* the secure object
*
* @return <code>true</code> if the secure object is
* <code>MethodInvocation</code>, <code>false</code> otherwise
* @return <code>true</code> if the secure object is <code>MethodInvocation</code>, <code>false</code> otherwise
*/
public boolean supports(Class clazz)
{
@@ -253,7 +249,15 @@ public class ACLEntryVoter implements AccessDecisionVoter, InitializingBean
testNodeRef = (NodeRef) invocation.getArguments()[cad.parameter];
if (log.isDebugEnabled())
{
log.debug("\tPermission test on node " + nodeService.getPath(testNodeRef));
if (nodeService.exists(testNodeRef))
{
log.debug("\tPermission test on node " + nodeService.getPath(testNodeRef));
}
else
{
log.debug("\tPermission test on non-existing node " +testNodeRef);
}
}
}
else if (ChildAssociationRef.class.isAssignableFrom(params[cad.parameter]))
@@ -263,7 +267,14 @@ public class ACLEntryVoter implements AccessDecisionVoter, InitializingBean
testNodeRef = ((ChildAssociationRef) invocation.getArguments()[cad.parameter]).getChildRef();
if (log.isDebugEnabled())
{
log.debug("\tPermission test on node " + nodeService.getPath(testNodeRef));
if (nodeService.exists(testNodeRef))
{
log.debug("\tPermission test on node " + nodeService.getPath(testNodeRef));
}
else
{
log.debug("\tPermission test on non-existing node " + testNodeRef);
}
}
}
}
@@ -284,6 +295,14 @@ public class ACLEntryVoter implements AccessDecisionVoter, InitializingBean
testNodeRef = nodeService.getPrimaryParent(child).getParentRef();
if (log.isDebugEnabled())
{
if (nodeService.exists(testNodeRef))
{
log.debug("\tPermission test for parent on node " + nodeService.getPath(testNodeRef));
}
else
{
log.debug("\tPermission test for parent on non-existing node " + testNodeRef);
}
log.debug("\tPermission test for parent on node " + nodeService.getPath(testNodeRef));
}
}
@@ -295,8 +314,17 @@ public class ACLEntryVoter implements AccessDecisionVoter, InitializingBean
testNodeRef = ((ChildAssociationRef) invocation.getArguments()[cad.parameter]).getParentRef();
if (log.isDebugEnabled())
{
log.debug("\tPermission test for parent on child assoc ref for node "
+ nodeService.getPath(testNodeRef));
if (nodeService.exists(testNodeRef))
{
log.debug("\tPermission test for parent on child assoc ref for node "
+ nodeService.getPath(testNodeRef));
}
else
{
log.debug("\tPermission test for parent on child assoc ref for non existing node "
+ testNodeRef);
}
}
}

View File

@@ -0,0 +1,123 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.template;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.alfresco.repo.jscript.CategoryTemplateNode;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.TemplateImageResolver;
import org.alfresco.service.cmr.repository.TemplateNode;
import org.alfresco.service.cmr.search.CategoryService;
import org.alfresco.service.namespace.QName;
/**
* Support for finding classifications and their root categories.
*
* @author Andy Hind
*/
public final class Classification
{
private ServiceRegistry services;
private TemplateImageResolver imageResolver;
private StoreRef storeRef;
public Classification(StoreRef storeRef, ServiceRegistry services, TemplateImageResolver imageResolver)
{
this.storeRef = storeRef;
this.services = services;
this.imageResolver = imageResolver;
}
/**
* Find all the category nodes in a given classification.
*
* @param aspect the aspect, as a prefixed QName string, that defines the classification
*
* @return all the category nodes in a given classification.
*/
public List<CategoryTemplateNode> getAllCategoryNodes(String aspect)
{
return buildCategoryNodes(services.getCategoryService().getCategories(storeRef, createQName(aspect),
CategoryService.Depth.ANY));
}
/**
* Find all the category nodes in a given classification.
*
* @param aspect the aspect that defines the classification
*
* @return all the category nodes in a given classification.
*/
public List<CategoryTemplateNode> getAllCategoryNodes(QName aspect)
{
return buildCategoryNodes(services.getCategoryService().getCategories(storeRef, aspect,
CategoryService.Depth.ANY));
}
/**
* @return all the aspects that define a classification.
*/
public List<QName> getAllClassificationAspects()
{
Collection<QName> aspects = services.getCategoryService().getClassificationAspects();
ArrayList<QName> answer = new ArrayList<QName>(aspects.size());
answer.addAll(aspects);
return answer;
}
/**
* Get the root categories in a classification.
*
* @param aspect the aspect, as a prefixed QName string, that defines the classification
*
* @return List of CategoryTemplateNode root categories
*/
public List<CategoryTemplateNode> getRootCategories(String aspect)
{
return buildCategoryNodes(services.getCategoryService().getRootCategories(storeRef, createQName(aspect)));
}
private List<CategoryTemplateNode> buildCategoryNodes(Collection<ChildAssociationRef> cars)
{
ArrayList<CategoryTemplateNode> categoryNodes = new ArrayList<CategoryTemplateNode>(cars.size());
for (ChildAssociationRef car : cars)
{
categoryNodes.add(new CategoryTemplateNode(car.getChildRef(), this.services, this.imageResolver));
}
return categoryNodes;
}
private QName createQName(String s)
{
QName qname;
if (s.indexOf(QName.NAMESPACE_BEGIN) != -1)
{
qname = QName.createQName(s);
}
else
{
qname = QName.createQName(s, this.services.getNamespaceService());
}
return qname;
}
}
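
A short usage sketch (illustrative only, not part of this changeset): templates see this object as "classification" once the FreeMarkerProcessor change below is applied, but it can also be driven directly from Java. The "SpacesStore" store identifier and the cm:generalclassifiable aspect used here are assumptions for the example.

import java.util.List;

import org.alfresco.repo.jscript.CategoryTemplateNode;
import org.alfresco.repo.template.Classification;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.TemplateImageResolver;

public class ClassificationSketch
{
    public void listRootCategories(ServiceRegistry services, TemplateImageResolver imageResolver)
    {
        StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
        Classification classification = new Classification(storeRef, services, imageResolver);
        // roughly what ${classification.getRootCategories("cm:generalclassifiable")} does in a template
        List<CategoryTemplateNode> roots = classification.getRootCategories("cm:generalclassifiable");
        for (CategoryTemplateNode category : roots)
        {
            System.out.println(category);
        }
    }
}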

View File

@@ -301,6 +301,13 @@ public class FreeMarkerProcessor implements TemplateProcessor
// current date/time is useful to have and isn't supplied by FreeMarker by default
model.put("date", new Date());
// Session support
model.put("session", new Session(services, imageResolver));
// Classification support
model.put("classification", new Classification(companyHome.getStoreRef(), services, imageResolver));
// add custom method objects
model.put("hasAspect", new HasAspectMethod());
model.put("message", new I18NMessageMethod());

View File

@@ -0,0 +1,50 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.template;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.TemplateImageResolver;
/**
* Support for session information in FreeMarker templates.
*
* @author Andy Hind
*/
public class Session
{
private ServiceRegistry services;
@SuppressWarnings("unused")
private TemplateImageResolver imageResolver;
public Session(ServiceRegistry services, TemplateImageResolver imageResolver)
{
this.services = services;
this.imageResolver = imageResolver;
}
/**
* Get the current authentication ticket.
*
* @return the current authentication ticket
*/
public String getTicket()
{
return services.getAuthenticationService().getCurrentTicket();
}
}

View File

@@ -59,4 +59,16 @@ public interface VersionCounterService
* @param storeRef the store reference
*/
public void resetVersionNumber(StoreRef storeRef);
/**
* Sets the version number for a specified store.
*
* WARNING: calling this method will completely reset the current
* version count for the specified store and cannot be undone.
*
* @param storeRef the store reference
* @param versionCount the new version count
*/
public void setVersionNumber(StoreRef storeRef, int versionCount);
}

View File

@@ -31,11 +31,10 @@ import org.alfresco.service.cmr.workflow.WorkflowDeployment;
import org.alfresco.service.cmr.workflow.WorkflowException;
import org.alfresco.service.cmr.workflow.WorkflowService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.AbstractLifecycleBean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.core.io.ClassPathResource;
@@ -44,7 +43,7 @@ import org.springframework.core.io.ClassPathResource;
*
* @author davidc
*/
public class WorkflowDeployer implements ApplicationListener
public class WorkflowDeployer extends AbstractLifecycleBean
{
// Logging support
private static Log logger = LogFactory.getLog("org.alfresco.repo.workflow");
@@ -222,16 +221,16 @@ public class WorkflowDeployer implements ApplicationListener
}
}
/*
* (non-Javadoc)
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
*/
public void onApplicationEvent(ApplicationEvent event)
@Override
protected void onBootstrap(ApplicationEvent event)
{
if (event instanceof ContextRefreshedEvent)
{
deploy();
}
deploy();
}
@Override
protected void onShutdown(ApplicationEvent event)
{
// NOOP
}
}
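
The same ApplicationListener-to-AbstractLifecycleBean conversion is applied to several bootstrap beans in this changeset. Below is a minimal sketch of the pattern, illustrative only; nothing beyond the two hooks visible in the diff is assumed about the base class.

import org.alfresco.util.AbstractLifecycleBean;
import org.springframework.context.ApplicationEvent;

public class ExampleBootstrapBean extends AbstractLifecycleBean
{
    @Override
    protected void onBootstrap(ApplicationEvent event)
    {
        // work that must run once the application context has been refreshed
    }

    @Override
    protected void onShutdown(ApplicationEvent event)
    {
        // NOOP
    }
}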

View File

@@ -0,0 +1,707 @@
/*
* Copyright (C) 2006 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.workflow;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.workflow.WorkflowDefinition;
import org.alfresco.service.cmr.workflow.WorkflowDeployment;
import org.alfresco.service.cmr.workflow.WorkflowInstance;
import org.alfresco.service.cmr.workflow.WorkflowPath;
import org.alfresco.service.cmr.workflow.WorkflowService;
import org.alfresco.service.cmr.workflow.WorkflowTask;
import org.alfresco.service.cmr.workflow.WorkflowTaskState;
import org.alfresco.service.cmr.workflow.WorkflowTransition;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
import org.springframework.core.io.ClassPathResource;
/**
* An interactive console for Workflows.
*
* @author davidc
*/
public class WorkflowInterpreter
{
// Service dependencies
private WorkflowService workflowService;
private NamespaceService namespaceService;
private PersonService personService;
/**
* The reader for interaction.
*/
private BufferedReader fIn;
/**
* Current context
*/
private WorkflowDefinition currentWorkflowDef = null;
private WorkflowPath currentPath = null;
private String currentDeploy = null;
/**
* Last command issued
*/
private String lastCommand = null;
/**
* Variables
*/
private Map<QName, Serializable> vars = new HashMap<QName, Serializable>();
/**
* Main entry point.
*
* Runs the interpreter read-eval-print loop as the System user; type 'exit' to quit.
*/
public static void main(String[] args)
{
ApplicationContext context = ApplicationContextHelper.getApplicationContext();
WorkflowInterpreter console = (WorkflowInterpreter)context.getBean("workflowInterpreter");
AuthenticationUtil.setSystemUserAsCurrentUser();
console.rep();
System.exit(0);
}
/**
* Make up a new console.
*/
public WorkflowInterpreter()
{
fIn = new BufferedReader(new InputStreamReader(System.in));
}
/**
* @param workflowService The Workflow Service
*/
public void setWorkflowService(WorkflowService workflowService)
{
this.workflowService = workflowService;
}
/**
* @param namespaceService namespaceService
*/
public void setNamespaceService(NamespaceService namespaceService)
{
this.namespaceService = namespaceService;
}
/**
* @param personService personService
*/
public void setPersonService(PersonService personService)
{
this.personService = personService;
}
/**
* A Read-Eval-Print loop.
*/
public void rep()
{
while (true)
{
System.out.print("ok> ");
try
{
String line = fIn.readLine();
if (line.equals("exit"))
{
return;
}
long startms = System.currentTimeMillis();
System.out.print(interpretCommand(line));
System.out.println("" + (System.currentTimeMillis() - startms) + "ms");
}
catch (Exception e)
{
e.printStackTrace(System.err);
System.out.println("");
}
}
}
/**
* Interpret a single command using the BufferedReader passed in for any data needed.
*
* @param line The unparsed command
* @return The textual output of the command.
*/
public String interpretCommand(String line)
throws IOException
{
String[] command = line.split(" ");
if (command.length == 0)
{
command = new String[1];
command[0] = line;
}
ByteArrayOutputStream bout = new ByteArrayOutputStream();
PrintStream out = new PrintStream(bout);
// repeat last command?
if (command[0].equals("r"))
{
if (lastCommand == null)
{
return "No command entered yet.";
}
return "repeating command " + lastCommand + "\n\n" + interpretCommand(lastCommand);
}
// remember last command
lastCommand = line;
// execute command
if (command[0].equals("help"))
{
String helpFile = I18NUtil.getMessage("workflow_console.help");
ClassPathResource helpResource = new ClassPathResource(helpFile);
byte[] helpBytes = new byte[500];
InputStream helpStream = helpResource.getInputStream();
try
{
int read = helpStream.read(helpBytes);
while (read != -1)
{
bout.write(helpBytes, 0, read);
read = helpStream.read(helpBytes);
}
}
finally
{
helpStream.close();
}
}
else if (command[0].equals("show"))
{
if (command.length < 2)
{
return "Syntax Error.\n";
}
else if (command[1].equals("definitions"))
{
List<WorkflowDefinition> defs = workflowService.getDefinitions();
for (WorkflowDefinition def : defs)
{
out.println("id: " + def.id + " , name: " + def.name + " , title: " + def.title + " , version: " + def.version);
}
}
else if (command[1].equals("workflows"))
{
if (currentWorkflowDef == null)
{
return "workflow definition not in use. Enter command use <workflowDefId>.\n";
}
List<WorkflowInstance> workflows = workflowService.getActiveWorkflows(currentWorkflowDef.id);
for (WorkflowInstance workflow : workflows)
{
out.println("id: " + workflow.id + " , desc: " + workflow.description + " , start date: " + workflow.startDate + " , def: " + workflow.definition.title);
}
}
else if (command[1].equals("paths"))
{
String workflowId = (command.length == 3) ? command[2] : (currentPath == null) ? null : currentPath.instance.id;
if (workflowId == null)
{
return "Syntax Error. Workflow Id not specified.\n";
}
List<WorkflowPath> paths = workflowService.getWorkflowPaths(workflowId);
for (WorkflowPath path : paths)
{
out.println("path id: " + path.id + " , node: " + path.node.name);
}
}
else if (command[1].equals("tasks"))
{
String pathId = (command.length == 3) ? command[2] : (currentPath == null) ? null : currentPath.id;
if (pathId == null)
{
return "Syntax Error. Path Id not specified.\n";
}
List<WorkflowTask> tasks = workflowService.getTasksForWorkflowPath(pathId);
for (WorkflowTask task : tasks)
{
out.println("task id: " + task.id + " , name: " + task.name + " , properties: " + task.properties.size());
}
}
else if (command[1].equals("transitions"))
{
String workflowId = (command.length == 3) ? command[2] : (currentPath == null) ? null : currentPath.instance.id;
if (workflowId == null)
{
return "Syntax Error. Workflow Id not specified.\n";
}
List<WorkflowPath> paths = workflowService.getWorkflowPaths(workflowId);
for (WorkflowPath path : paths)
{
out.println("path: " + path.id + " , node: " + path.node.name + " , active: " + path.active);
List<WorkflowTask> tasks = workflowService.getTasksForWorkflowPath(path.id);
for (WorkflowTask task : tasks)
{
out.println(" task id: " + task.id + " , name: " + task.name + " , properties: " + task.properties.size());
}
for (WorkflowTransition transition : path.node.transitions)
{
out.println(" transition id: " + ((transition.id == null || transition.id.equals("")) ? "[default]" : transition.id) + " , title: " + transition.title);
}
}
}
else if (command[1].equals("my"))
{
if (command.length != 3)
{
return "Syntax Error.\n";
}
if (command[2].equals("tasks"))
{
out.println(AuthenticationUtil.getCurrentUserName() + ":");
List<WorkflowTask> tasks = workflowService.getAssignedTasks(AuthenticationUtil.getCurrentUserName(), WorkflowTaskState.IN_PROGRESS);
for (WorkflowTask task : tasks)
{
out.println("id: " + task.id + " , name: " + task.name + " , properties: " + task.properties.size() + " , workflow: " + task.path.instance.id + " , path: " + task.path.id);
}
}
else if (command[2].equals("completed"))
{
out.println(AuthenticationUtil.getCurrentUserName() + ":");
List<WorkflowTask> tasks = workflowService.getAssignedTasks(AuthenticationUtil.getCurrentUserName(), WorkflowTaskState.COMPLETED);
for (WorkflowTask task : tasks)
{
out.println("id: " + task.id + " , name " + task.name + " , properties: " + task.properties.size() + " , workflow: " + task.path.instance.id + " , path: " + task.path.id);
}
}
else
{
return "Syntax Error.\n";
}
}
else
{
return "Syntax Error.\n";
}
}
else if (command[0].equals("desc"))
{
if (command.length < 2)
{
return "Syntax Error.\n";
}
if (command[1].equals("task"))
{
if (command.length != 3)
{
return "Syntax Error.\n";
}
WorkflowTask task = workflowService.getTaskById(command[2]);
out.println("id: " + task.id);
out.println("name: " + task.name);
out.println("title: " + task.title);
out.println("description: " + task.description);
out.println("state: " + task.state);
out.println("path: " + task.path.id);
out.println("transitions: " + task.path.node.transitions.length);
for (WorkflowTransition transition : task.path.node.transitions)
{
out.println(" transition: " + ((transition == null || transition.id.equals("")) ? "[default]" : transition.id) + " , title: " + transition.title + " , desc: " + transition.description);
}
out.println("properties: " + task.properties.size());
for (Map.Entry<QName, Serializable> prop : task.properties.entrySet())
{
out.println(" " + prop.getKey() + " = " + prop.getValue());
}
}
else if (command[1].equals("workflow"))
{
if (command.length != 3)
{
return "Syntax Error.\n";
}
WorkflowInstance workflow = workflowService.getWorkflowById(command[2]);
out.println("definition: " + workflow.definition.name);
out.println("id: " + workflow.id);
out.println("description: " + workflow.description);
out.println("active: " + workflow.active);
out.println("start date: " + workflow.startDate);
out.println("end date: " + workflow.endDate);
out.println("initiator: " + workflow.initiator);
out.println("context: " + workflow.context);
out.println("package: " + workflow.workflowPackage);
}
else
{
return "Syntax Error.\n";
}
}
else if (command[0].equals("deploy"))
{
if (command.length != 2)
{
return "Syntax Error.\n";
}
ClassPathResource workflowDef = new ClassPathResource(command[1]);
WorkflowDeployment deployment = workflowService.deployDefinition("jbpm", workflowDef.getInputStream(), MimetypeMap.MIMETYPE_XML);
WorkflowDefinition def = deployment.definition;
for (String problem : deployment.problems)
{
out.println(problem);
}
out.println("deployed definition id: " + def.id + " , name: " + def.name + " , title: " + def.title + " , version: " + def.version);
currentDeploy = command[1];
out.print(interpretCommand("use " + def.id));
}
else if (command[0].equals("redeploy"))
{
if (currentDeploy == null)
{
return "nothing to redeploy\n";
}
out.print(interpretCommand("deploy " + currentDeploy));
}
else if (command[0].equals("use"))
{
if (command.length == 1)
{
out.println("definition: " + ((currentWorkflowDef == null) ? "None" : currentWorkflowDef.id + " , name: " + currentWorkflowDef.title));
out.println("workflow: " + ((currentPath == null) ? "None" : currentPath.instance.id + " , active: " + currentPath.instance.active));
out.println("path: " + ((currentPath == null) ? "None" : currentPath.id + " , node: " + currentPath.node.title));
}
else if (command.length > 1)
{
if (command[1].equals("definition"))
{
if (command.length != 3)
{
return "Syntax Error.\n";
}
WorkflowDefinition def = workflowService.getDefinitionById(command[2]);
if (def == null)
{
return "Not found.\n";
}
currentWorkflowDef = def;
currentPath = null;
out.print(interpretCommand("use"));
}
else if (command[1].equals("workflow"))
{
if (command.length != 3)
{
return "Syntax Error.\n";
}
WorkflowInstance instance = workflowService.getWorkflowById(command[2]);
currentWorkflowDef = instance.definition;
currentPath = workflowService.getWorkflowPaths(instance.id).get(0);
out.print(interpretCommand("use"));
}
else
{
return "Syntax Error.\n";
}
}
}
else if (command[0].equals("user"))
{
if (command.length == 2)
{
AuthenticationUtil.setCurrentUser(command[1]);
}
out.println("using user " + AuthenticationUtil.getCurrentUserName());
}
else if (command[0].equals("start"))
{
Map<QName, Serializable> params = new HashMap<QName, Serializable>();
for (int i = 1; i < command.length; i++)
{
String[] param = command[i].split("=");
QName qname = QName.createQName(param[0], namespaceService);
if (param.length == 1)
{
if (!vars.containsKey(qname))
{
return "var " + qname + " not found.\n";
}
params.put(qname, vars.get(qname));
}
else if (param.length == 2)
{
params.put(qname, param[1]);
}
else
{
return "Syntax Error.\n";
}
}
WorkflowPath path = workflowService.startWorkflow(currentWorkflowDef.id, params);
out.println("started workflow id: " + path.instance.id + ", path: " + path.id + " , node: " + path.node.name + " , def: " + path.instance.definition.title);
currentPath = path;
}
else if (command[0].equals("update"))
{
if (command.length < 3)
{
return "Syntax Error.\n";
}
if (command[1].equals("task"))
{
if (command.length < 4)
{
return "Syntax Error.\n";
}
Map<QName, Serializable> params = new HashMap<QName, Serializable>();
for (int i = 3; i < command.length; i++)
{
String[] param = command[i].split("=");
QName qname = QName.createQName(param[0], namespaceService);
if (param.length == 1)
{
if (!vars.containsKey(qname))
{
return "var " + qname + " not found.\n";
}
params.put(qname, vars.get(qname));
}
else if (param.length == 2)
{
params.put(qname, param[1]);
}
else
{
return "Syntax Error.\n";
}
}
WorkflowTask task = workflowService.updateTask(command[2], params, null, null);
out.println("updated task id: " + command[2] + ", properties: " + task.properties.size());
}
else
{
return "Syntax Error.\n";
}
}
else if (command[0].equals("signal"))
{
if (command.length < 2)
{
return "Syntax Error.\n";
}
WorkflowPath path = workflowService.signal(command[1], (command.length == 3) ? command[2] : null);
out.println("signal sent - path id: " + path.id + " , node: " + path.node.name);
}
else if (command[0].equals("end"))
{
if (command.length < 3)
{
return "Syntax Error.\n";
}
if (command[1].equals("task"))
{
WorkflowTask task = workflowService.endTask(command[2], (command.length == 4) ? command[3] : null);
out.println("signal sent - path id: " + task.path.id + " , node: " + task.path.node.name);
}
else if (command[1].equals("workflow"))
{
String workflowId = (command.length == 3) ? command[2] : (currentPath == null) ? null : currentPath.instance.id;
if (workflowId == null)
{
return "Syntax Error. Workflow Id not specified.\n";
}
workflowService.cancelWorkflow(workflowId);
out.println("cancelled workflow" + workflowId);
}
else
{
return "Syntax Error.\n";
}
}
else if (command[0].equals("var"))
{
if (command.length == 1)
{
for (Map.Entry<QName, Serializable> entry : vars.entrySet())
{
out.println(entry.getKey() + " = " + entry.getValue());
}
}
else if (command.length == 2)
{
String[] param = command[1].split("=");
if (param.length == 0)
{
return "Syntax Error.\n";
}
if (param.length == 1)
{
QName qname = QName.createQName(param[0], namespaceService);
vars.remove(qname);
out.println("deleted var " + qname);
}
else if (param.length == 2)
{
boolean multi = false;
if (param[0].endsWith("*"))
{
param[0] = param[0].substring(0, param[0].length() -1);
multi = true;
}
QName qname = QName.createQName(param[0], namespaceService);
String[] strValues = param[1].split(",");
if (!multi && strValues.length > 1)
{
return "Syntax Error.\n";
}
if (!multi)
{
vars.put(qname, strValues[0]);
}
else
{
List<String> values = new ArrayList<String>();
for (String strValue : strValues)
{
values.add(strValue);
}
vars.put(qname, (Serializable)values);
}
out.println("set var " + qname + " = " + vars.get(qname));
}
else
{
return "Syntax Error.\n";
}
}
else if (command.length == 4)
{
if (command[2].equals("person"))
{
boolean multi = false;
if (command[1].endsWith("*"))
{
command[1] = command[1].substring(0, command[1].length() -1);
multi = true;
}
QName qname = QName.createQName(command[1], namespaceService);
String[] strValues = command[3].split(",");
if (!multi && strValues.length > 1)
{
return "Syntax Error.\n";
}
if (!multi)
{
NodeRef auth = personService.getPerson(strValues[0]);
vars.put(qname, auth);
}
else
{
List<NodeRef> values = new ArrayList<NodeRef>();
for (String strValue : strValues)
{
NodeRef auth = personService.getPerson(strValue);
values.add(auth);
}
vars.put(qname, (Serializable)values);
}
out.println("set var " + qname + " = " + vars.get(qname));
}
else
{
return "Syntax Error.\n";
}
}
else
{
return "Syntax Error.\n";
}
}
else
{
return "Syntax Error.\n";
}
out.flush();
String retVal = new String(bout.toByteArray());
out.close();
return retVal;
}
/**
* Get currently used workflow definition
*
* @return workflow definition
*/
public WorkflowDefinition getCurrentWorkflowDef()
{
return currentWorkflowDef;
}
/**
* Get current user name
*
* @return user name
*/
public String getCurrentUserName()
{
return AuthenticationUtil.getCurrentUserName();
}
}
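
For illustration (not part of this changeset), the console can also be driven programmatically through interpretCommand; the classpath location of the process definition used below is an assumption, and the bean would normally be obtained as "workflowInterpreter" from the application context as in main() above.

import java.io.IOException;

import org.alfresco.repo.workflow.WorkflowInterpreter;

public class WorkflowConsoleSketch
{
    public void run(WorkflowInterpreter console) throws IOException
    {
        // deploy a definition (path is illustrative), which also selects it as the current definition
        System.out.print(console.interpretCommand("deploy alfresco/workflow/example_processdefinition.xml"));
        System.out.print(console.interpretCommand("show definitions"));
        // start a workflow from the current definition and list the caller's tasks
        System.out.print(console.interpretCommand("start"));
        System.out.print(console.interpretCommand("show my tasks"));
    }
}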

View File

@@ -25,6 +25,7 @@ import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.cmr.workflow.WorkflowException;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
@@ -47,6 +48,7 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
private SearchService searchService;
private NodeService nodeService;
private NamespaceService namespaceService;
private PermissionService permissionService;
private NodeRef systemWorkflowContainer = null;
@@ -74,6 +76,11 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
this.nodeService = nodeService;
}
public void setPermissionService(PermissionService permissionService)
{
this.permissionService = permissionService;
}
/**
* @param namespaceService namespace service
*/
@@ -113,6 +120,8 @@ public class WorkflowPackageImpl implements WorkflowPackageComponent
QName qname = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, containerName);
ChildAssociationRef childRef = nodeService.createNode(packages, ContentModel.ASSOC_CONTAINS, qname, ContentModel.TYPE_SYSTEM_FOLDER);
container = childRef.getChildRef();
// TODO: For now, grant full access to everyone
permissionService.setPermission(container, PermissionService.ALL_AUTHORITIES, PermissionService.ALL_PERMISSIONS, true);
isSystemPackage = true;
}

View File

@@ -311,10 +311,25 @@ public class JBPMEngine extends BPMEngine
/* (non-Javadoc)
* @see org.alfresco.repo.workflow.WorkflowDefinitionComponent#getDefinitionById(java.lang.String)
*/
public WorkflowDefinition getDefinitionById(String workflowDefinitionId)
public WorkflowDefinition getDefinitionById(final String workflowDefinitionId)
{
// TODO
throw new UnsupportedOperationException();
try
{
return (WorkflowDefinition)jbpmTemplate.execute(new JbpmCallback()
{
public Object doInJbpm(JbpmContext context)
{
// retrieve process
GraphSession graphSession = context.getGraphSession();
ProcessDefinition processDefinition = graphSession.getProcessDefinition(getJbpmId(workflowDefinitionId));
return processDefinition == null ? null : createWorkflowDefinition(processDefinition);
}
});
}
catch(JbpmException e)
{
throw new WorkflowException("Failed to retrieve workflow definition '" + workflowDefinitionId + "'", e);
}
}
/* (non-Javadoc)
@@ -1714,7 +1729,7 @@ public class JBPMEngine extends BPMEngine
workflowTransition.id = transition.getName();
Node node = transition.getFrom();
workflowTransition.isDefault = node.getDefaultLeavingTransition().equals(transition);
if (workflowTransition.id.length() == 0)
if (workflowTransition.id == null || workflowTransition.id.length() == 0)
{
workflowTransition.title = getLabel(DEFAULT_TRANSITION_LABEL, TITLE_LABEL, workflowTransition.id);
workflowTransition.description = getLabel(DEFAULT_TRANSITION_LABEL, DESC_LABEL, workflowTransition.title);

View File

@@ -341,8 +341,10 @@ public class JBPMEngineTest extends BaseSpringTest
public void testSignal()
{
Map<QName, Serializable> parameters = new HashMap<QName, Serializable>();
parameters.put(QName.createQName(NamespaceService.DEFAULT_URI, "testNode"), testNodeRef);
WorkflowDefinition workflowDef = getTestDefinition();
WorkflowPath path = workflowComponent.startWorkflow(workflowDef.id, null);
WorkflowPath path = workflowComponent.startWorkflow(workflowDef.id, parameters);
assertNotNull(path);
WorkflowPath updatedPath = workflowComponent.signal(path.id, path.node.transitions[1].id);
assertNotNull(updatedPath);
@@ -374,6 +376,26 @@ public class JBPMEngineTest extends BaseSpringTest
}
public void xtestMultiAssign()
{
WorkflowDefinition workflowDef = getTestDefinition();
List<String> bpm_assignees = new ArrayList<String>();
bpm_assignees.add("admin");
bpm_assignees.add("bob");
bpm_assignees.add("fred");
Map<QName, Serializable> parameters = new HashMap<QName, Serializable>();
parameters.put(QName.createQName(NamespaceService.BPM_MODEL_1_0_URI, "assignees"), (Serializable)bpm_assignees);
parameters.put(QName.createQName(NamespaceService.DEFAULT_URI, "testNode"), testNodeRef);
WorkflowPath path = workflowComponent.startWorkflow(workflowDef.id, parameters);
assertNotNull(path);
List<WorkflowTask> tasks = workflowComponent.getTasksForWorkflowPath(path.id);
assertNotNull(tasks);
assertEquals(1, tasks.size());
WorkflowTask updatedTask = taskComponent.endTask(tasks.get(0).id, "multi");
assertNotNull(updatedTask);
}
public void testEndTask()
{
WorkflowDefinition workflowDef = getTestDefinition();