Merged V2.2 to HEAD

7534: Merged V2.1 to HEAD
      7398: XPath metadata extractor selector handles malformed and empty XML files
      7401: Fix AR-1879: JBPM Timer never fires
      7413: Contribution: Integrity checker ignores exceptions that would normally trigger transaction retries.
      7416: AR-1884. Unicode wildcard processing.
      7417: Added filtering of pseudo files when a partial wildcard search path is used, such as '*.csv'. AR-1889.
      7436: AR-1863: major versions can now be created via the web service API.
      7451: Fix for handling of UTF-8 application/x-www-form-urlencoded encoded form arguments as raised in support ticket 242
      7458: Fix for AR-1900
      7520: Fix to Template API where content was not retrievable from custom d:content properties on a node


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@8413 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley
2008-03-03 13:35:10 +00:00
parent 9fe5e55dc8
commit c920bfb309
11 changed files with 149 additions and 28 deletions

View File

@@ -844,9 +844,36 @@ public class ContentDiskDriver extends AlfrescoDiskDriver implements DiskInterfa
 if ( WildCard.containsWildcards(searchFileSpec))
 {
-    // Check if the folder has any associated pseudo files
+    // Get the list of pseudo files for the search path
     pseudoList = searchFolderState.getPseudoFileList();
+
+    // Check if the wildcard is for all files or a subset
+    if ( searchFileSpec.equals( "*") == false && pseudoList != null && pseudoList.numberOfFiles() > 0)
+    {
+        // Generate a subset of pseudo files that match the wildcard search pattern
+        WildCard wildCard = new WildCard( searchFileSpec, false);
+        PseudoFileList filterList = null;
+
+        for ( int i = 0; i < pseudoList.numberOfFiles(); i++)
+        {
+            PseudoFile pseudoFile = pseudoList.getFileAt( i);
+            if ( wildCard.matchesPattern( pseudoFile.getFileName()))
+            {
+                // Add the pseudo file to the filtered list
+                if ( filterList == null)
+                    filterList = new PseudoFileList();
+                filterList.addFile( pseudoFile);
+            }
+        }
+
+        // Use the filtered pseudo file list, or null if there were no matches
+        pseudoList = filterList;
+    }
 }
 else if ( results == null || results.size() == 0)
 {

View File

@@ -114,6 +114,19 @@ public class XmlMetadataExtracter extends AbstractMappingMetadataExtracter
             Map<QName, Serializable> destination,
             Map<String, Set<QName>> mapping)
     {
+        // Check the content length
+        if (reader.getSize() == 0)
+        {
+            // There is no content. We don't spoof any properties so there can be nothing extracted.
+            if (logger.isDebugEnabled())
+            {
+                logger.debug("\n" +
+                        "XML document has zero length, so bypassing extraction: \n" +
+                        " Document: " + reader);
+            }
+            return destination;
+        }
+
         MetadataExtracter extracter = null;
         // Select a worker
         for (ContentWorkerSelector<MetadataExtracter> selector : selectors)
@@ -123,6 +136,10 @@ public class XmlMetadataExtracter extends AbstractMappingMetadataExtracter
             {
                 extracter = selector.getWorker(spawnedReader);
             }
+            catch (Throwable e)
+            {
+                // The selector failed, so try another
+            }
             finally
             {
                 if (reader.isChannelOpen())
@@ -149,8 +166,14 @@ public class XmlMetadataExtracter extends AbstractMappingMetadataExtracter
         // Did we find anything?
         if (extracter == null)
         {
+            if (logger.isDebugEnabled())
+            {
+                logger.debug("\n" +
+                        "No working metadata extractor could be found: \n" +
+                        " Document: " + reader);
+            }
             // There will be no properties extracted
-            modifiedProperties = Collections.emptyMap();
+            modifiedProperties = destination;
         }
         else
         {
@@ -176,7 +199,7 @@ public class XmlMetadataExtracter extends AbstractMappingMetadataExtracter
"XML metadata extractor redirected: \n" + "XML metadata extractor redirected: \n" +
" Reader: " + reader + "\n" + " Reader: " + reader + "\n" +
" Extracter: " + extracter + "\n" + " Extracter: " + extracter + "\n" +
" Extracted: " + modifiedProperties); " Metadata: " + modifiedProperties);
} }
return modifiedProperties; return modifiedProperties;
} }

View File

@@ -67,6 +67,8 @@ public class XmlMetadataExtracterTest extends TestCase
 {
     private static final String FILE_ALFRESCO_MODEL = "xml-metadata/alfresco-model-sample.xml";
     private static final String FILE_ECLIPSE_PROJECT = "xml-metadata/eclipse-project-sample.xml";
+    private static final String FILE_EMPTY = "xml-metadata/empty-sample.xml";
+    private static final String FILE_MALFORMED = "xml-metadata/malformed-sample.xml";
     private static final String CTX_LOCATION = "classpath:xml-metadata/xml-metadata-test-context.xml";
     private static final ApplicationContext ctx = new ClassPathXmlApplicationContext(CTX_LOCATION);
@@ -157,6 +159,38 @@ public class XmlMetadataExtracterTest extends TestCase
assertEquals("JavaCC Nature", checkProperties.get(ContentModel.PROP_DESCRIPTION)); assertEquals("JavaCC Nature", checkProperties.get(ContentModel.PROP_DESCRIPTION));
} }
public void testEmptyFile() throws Exception
{
// Get an empty file
ContentReader reader = getReader(FILE_EMPTY);
assertTrue(reader.exists());
// Pass it to the extracter
PropertyMap checkProperties = new PropertyMap();
checkProperties.put(ContentModel.PROP_TITLE, getName());
xmlMetadataExtracter.extract(reader, checkProperties);
// The map should be unaffected
assertNotNull("Properties changed by empty file extraction", checkProperties.get(ContentModel.PROP_TITLE));
assertEquals("Properties changed by empty file extraction", getName(), checkProperties.get(ContentModel.PROP_TITLE));
}
public void testMalformedFile() throws Exception
{
// Get an empty file
ContentReader reader = getReader(FILE_MALFORMED);
assertTrue(reader.exists());
// Pass it to the extracter
PropertyMap checkProperties = new PropertyMap();
checkProperties.put(ContentModel.PROP_TITLE, getName());
xmlMetadataExtracter.extract(reader, checkProperties);
// The map should be unaffected
assertNotNull("Properties changed by malformed file extraction", checkProperties.get(ContentModel.PROP_TITLE));
assertEquals("Properties changed by malformed file extraction", getName(), checkProperties.get(ContentModel.PROP_TITLE));
}
public void testRootElementNameSelector() throws Exception public void testRootElementNameSelector() throws Exception
{ {
// Load the example files // Load the example files

View File

@@ -35,6 +35,7 @@ import org.alfresco.repo.node.NodeServicePolicies;
 import org.alfresco.repo.policy.JavaBehaviour;
 import org.alfresco.repo.policy.PolicyComponent;
 import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
+import org.alfresco.repo.transaction.RetryingTransactionHelper;
 import org.alfresco.service.cmr.dictionary.AspectDefinition;
 import org.alfresco.service.cmr.dictionary.AssociationDefinition;
 import org.alfresco.service.cmr.dictionary.ClassDefinition;
@@ -670,6 +671,24 @@ public class IntegrityChecker
             }
             catch (Throwable e)
             {
+                // This means that integrity checking itself failed. This is serious.
+                // There are some exceptions that can be handled by transaction retries, so
+                // we attempt to handle these and let them get out to trigger the retry.
+                // Thanks to Carina Lansing.
+                Throwable retryThrowable = RetryingTransactionHelper.extractRetryCause(e);
+                if (retryThrowable != null)
+                {
+                    // The transaction will be retrying on this, so there's no need for the aggressive
+                    // reporting that would normally happen
+                    if (e instanceof RuntimeException)
+                    {
+                        throw (RuntimeException) e;
+                    }
+                    else
+                    {
+                        throw new RuntimeException(e);
+                    }
+                }
                 e.printStackTrace();
                 // log it as an error and move to next event
                 IntegrityRecord exceptionRecord = new IntegrityRecord("" + e.getMessage());

View File

@@ -353,7 +353,7 @@ public class RetryingTransactionHelper
      * @param cause the cause to examine
      * @return Returns the original cause if it is a valid retry cause, otherwise <tt>null</tt>
      */
-    private Throwable extractRetryCause(Throwable cause)
+    public static Throwable extractRetryCause(Throwable cause)
     {
         Throwable retryCause = ExceptionStackUtil.getCause(cause, RETRY_EXCEPTIONS);
         if (retryCause == null)

View File

@@ -457,6 +457,8 @@ public class WorkflowInterpreter extends BaseInterpreter
             }
         }
+
+        List<WorkflowTimer> timers = new ArrayList<WorkflowTimer>();
         if (id.equals("all"))
         {
             for (WorkflowDefinition def : workflowService.getAllDefinitions())
@@ -464,11 +466,7 @@ public class WorkflowInterpreter extends BaseInterpreter
                 List<WorkflowInstance> workflows = workflowService.getActiveWorkflows(def.id);
                 for (WorkflowInstance workflow : workflows)
                 {
-                    List<WorkflowTimer> timers = workflowService.getTimers(workflow.id);
-                    for (WorkflowTimer timer : timers)
-                    {
-                        out.println("id: " + timer.id + " , name: " + timer.name + " , due date: " + timer.dueDate + " , path: " + timer.path.id + " , node: " + timer.path.node.name + " , process: " + timer.path.instance.id + " , task: " + timer.task.name + "(" + timer.task.id + ")");
-                    }
+                    timers.addAll(workflowService.getTimers(workflow.id));
                 }
             }
         }
@@ -477,11 +475,22 @@ public class WorkflowInterpreter extends BaseInterpreter
             List<WorkflowInstance> workflows = workflowService.getActiveWorkflows(id);
             for (WorkflowInstance workflow : workflows)
             {
-                List<WorkflowTimer> timers = workflowService.getTimers(workflow.id);
-                for (WorkflowTimer timer : timers)
-                {
-                    out.println("id: " + timer.id + " , name: " + timer.name + " , due date: " + timer.dueDate + " , path: " + timer.path.id + " , node: " + timer.path.node.name + " , process: " + timer.path.instance.id + " , task: " + timer.task.name + "(" + timer.task.id + ")");
-                }
+                timers.addAll(workflowService.getTimers(workflow.id));
             }
         }
+
+        for (WorkflowTimer timer : timers)
+        {
+            out.print("id: " + timer.id + " , name: " + timer.name + " , due date: " + timer.dueDate + " , path: " + timer.path.id + " , node: " + timer.path.node.name + " , process: " + timer.path.instance.id);
+            if (timer.task != null)
+            {
+                out.print(" , task: " + timer.task.name + "(" + timer.task.id + ")");
+            }
+            out.println();
+            if (timer.error != null)
+            {
+                out.println("error executing timer id " + timer.id);
+                out.println(timer.error);
+            }
+        }
     }

View File

@@ -84,12 +84,6 @@ public class AlfrescoTimer extends Timer
         }
 
         // execute timer
-        if (username == null)
-        {
-            executeResult = super.execute(jbpmContext);
-        }
-        else
-        {
         executeResult = AuthenticationUtil.runAs(new RunAsWork<Boolean>()
         {
             @SuppressWarnings("synthetic-access")
@@ -97,8 +91,7 @@ public class AlfrescoTimer extends Timer
             {
                 return AlfrescoTimer.super.execute(jbpmContext);
             }
-        }, username);
-        }
+        }, (username == null) ? "system" : username);
 
         return executeResult;
     }

View File

@@ -2868,9 +2868,14 @@ public class JBPMEngine extends BPMEngine
         WorkflowTimer workflowTimer = new WorkflowTimer();
         workflowTimer.id = createGlobalId(new Long(timer.getId()).toString());
         workflowTimer.name = timer.getName();
+        workflowTimer.error = timer.getException();
         workflowTimer.dueDate = timer.getDueDate();
         workflowTimer.path = createWorkflowPath(timer.getToken());
-        workflowTimer.task = createWorkflowTask(timer.getTaskInstance());
+        TaskInstance taskInstance = timer.getTaskInstance();
+        if (taskInstance != null)
+        {
+            workflowTimer.task = createWorkflowTask(taskInstance);
+        }
 
         return workflowTimer;
     }

View File

@@ -43,6 +43,9 @@ public class WorkflowTimer
     /** Due Date */
     public Date dueDate;
 
+    /** Error */
+    public String error;
+
     /*
      * (non-Javadoc)
      *

View File

@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+    <name>Repository</name>
+    <comment>JavaCC Nature</comment>
+    <projects>
+    </projects>
+    <buildSpec>
+        <buildCommand>