Investigation of build issues:

* Revert to 5.0.b-SNAPSHOT (so enterprise build works)
  * convert the publish job to use FTS rather than Lucene (a minimal FTS sketch follows the commit metadata below)



git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/modules/recordsmanagement/HEAD@89169 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Roy Wetherall
2014-10-27 02:48:49 +00:00
parent 4e542a6a60
commit b47290b010
3 changed files with 61 additions and 35 deletions
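
For context before the diffs: the core of the change is replacing the old three-argument searchService.query(store, language, query) Lucene call with a SearchParameters-based query run with transactional consistency. Below is a minimal, self-contained sketch of that pattern, not the shipped job class; the class name, setter wiring, and the literal "rma:unpublishedUpdate" query string are illustrative assumptions (the real code builds the query from ASPECT_UNPUBLISHED_UPDATE.getLocalName(), as the second file's diff shows).

// Illustrative sketch only -- not the actual PublishUpdatesJobExecuter.
// Shows the SearchParameters/FTS pattern introduced by this commit.
import java.util.List;

import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.QueryConsistency;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;

public class UnpublishedUpdateQuerySketch
{
    private SearchService searchService;

    public void setSearchService(SearchService searchService)
    {
        this.searchService = searchService;
    }

    public List<NodeRef> findUnpublishedUpdates()
    {
        SearchParameters searchParameters = new SearchParameters();
        // Transactional consistency: answer the query from the database so freshly
        // flagged nodes are visible without waiting for the index to catch up.
        searchParameters.setQueryConsistency(QueryConsistency.TRANSACTIONAL);
        // Assumed literal form of the aspect query; the job derives it from
        // ASPECT_UNPUBLISHED_UPDATE.getLocalName().
        searchParameters.setQuery("ASPECT:\"rma:unpublishedUpdate\"");
        searchParameters.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
        searchParameters.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);

        ResultSet results = searchService.query(searchParameters);
        try
        {
            return results.getNodeRefs();
        }
        finally
        {
            // Result sets hold resources and must always be closed.
            results.close();
        }
    }
}

Note also that the old Lucene query's @rma\:publishInProgress:false predicate is dropped; the in-progress check moves into Java as the new if (!publishing) guard, presumably because the simpler aspect-only query is easier to run transactionally.
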

View File

@@ -66,7 +66,7 @@
     </modules>
 
     <properties>
-        <alfresco.base.version>5.0.b</alfresco.base.version>
+        <alfresco.base.version>5.0.b-SNAPSHOT</alfresco.base.version>
         <!-- Database properties - default values to be overridden in settings.xml -->
         <db.driver>org.postgresql.Driver</db.driver>

View File

@@ -34,7 +34,9 @@ import org.alfresco.service.cmr.dictionary.DictionaryService;
 import org.alfresco.service.cmr.repository.NodeRef;
 import org.alfresco.service.cmr.repository.NodeService;
 import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.cmr.search.QueryConsistency;
 import org.alfresco.service.cmr.search.ResultSet;
+import org.alfresco.service.cmr.search.SearchParameters;
 import org.alfresco.service.cmr.search.SearchService;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -128,37 +130,41 @@ public class PublishUpdatesJobExecuter extends RecordsManagementJobExecuter
         {
             if (nodeService.exists(nodeRef))
             {
-                // Mark the update node as publishing in progress
-                markPublishInProgress(nodeRef);
-                try
-                {
-                    Date start = new Date();
-                    if (logger.isDebugEnabled())
-                    {
-                        logger.debug("Starting publish of updates ...");
-                        logger.debug(" - for " + nodeRef.toString());
-                        logger.debug(" - at " + start.toString());
-                    }
-
-                    // Publish updates
-                    publishUpdates(nodeRef);
-
-                    if (logger.isDebugEnabled())
-                    {
-                        Date end = new Date();
-                        long duration = end.getTime() - start.getTime();
-                        logger.debug("Completed publish of updates ...");
-                        logger.debug(" - for " + nodeRef.toString());
-                        logger.debug(" - at " + end.toString());
-                        logger.debug(" - duration " + Long.toString(duration));
-                    }
-                }
-                finally
-                {
-                    // Ensure the update node has either completed the publish or is marked as no longer in progress
-                    unmarkPublishInProgress(nodeRef);
-                }
+                boolean publishing = (Boolean)nodeService.getProperty(nodeRef, PROP_PUBLISH_IN_PROGRESS);
+                if (!publishing)
+                {
+                    // Mark the update node as publishing in progress
+                    markPublishInProgress(nodeRef);
+                    try
+                    {
+                        Date start = new Date();
+                        if (logger.isDebugEnabled())
+                        {
+                            logger.debug("Starting publish of updates ...");
+                            logger.debug(" - for " + nodeRef.toString());
+                            logger.debug(" - at " + start.toString());
+                        }
+
+                        // Publish updates
+                        publishUpdates(nodeRef);
+
+                        if (logger.isDebugEnabled())
+                        {
+                            Date end = new Date();
+                            long duration = end.getTime() - start.getTime();
+                            logger.debug("Completed publish of updates ...");
+                            logger.debug(" - for " + nodeRef.toString());
+                            logger.debug(" - at " + end.toString());
+                            logger.debug(" - duration " + Long.toString(duration));
+                        }
+                    }
+                    finally
+                    {
+                        // Ensure the update node has either completed the publish or is marked as no longer in progress
+                        unmarkPublishInProgress(nodeRef);
+                    }
+                }
             }
         }
     }
@@ -205,8 +211,7 @@ public class PublishUpdatesJobExecuter extends RecordsManagementJobExecuter
         {
             // Build the query string
             StringBuilder sb = new StringBuilder();
-            sb.append("+ASPECT:\"rma:").append(ASPECT_UNPUBLISHED_UPDATE.getLocalName()).append("\" ");
-            sb.append("@rma\\:").append(PROP_PUBLISH_IN_PROGRESS.getLocalName()).append(":false ");
+            sb.append("ASPECT:\"rma:").append(ASPECT_UNPUBLISHED_UPDATE.getLocalName()).append("\"");
             String query = sb.toString();
 
             if (logger.isDebugEnabled())
@@ -216,17 +221,32 @@ public class PublishUpdatesJobExecuter extends RecordsManagementJobExecuter
             // Execute query to find updates awaiting publishing
             List<NodeRef> resultNodes = null;
-            ResultSet results = searchService.query(
-                        StoreRef.STORE_REF_WORKSPACE_SPACESSTORE,
-                        SearchService.LANGUAGE_LUCENE,
-                        query);
+
+            SearchParameters searchParameters = new SearchParameters();
+            searchParameters.setQueryConsistency(QueryConsistency.TRANSACTIONAL);
+            searchParameters.setQuery(query);
+            searchParameters.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
+            searchParameters.setLanguage(SearchService.LANGUAGE_FTS_ALFRESCO);
+
             try
             {
-                resultNodes = results.getNodeRefs();
+                ResultSet results = searchService.query(searchParameters);
+                try
+                {
+                    resultNodes = results.getNodeRefs();
+                }
+                finally
+                {
+                    results.close();
+                }
             }
-            finally
+            catch (AlfrescoRuntimeException exception)
             {
-                results.close();
+                if (logger.isDebugEnabled())
+                {
+                    logger.debug("Error executing query, " + exception.getMessage());
+                }
+                throw exception;
             }
 
             if (logger.isDebugEnabled())

View File

@@ -981,9 +981,15 @@ public class DispositionServiceImplTest extends BaseRMTestCase
             @Override
             public void test(DispositionActionDefinition result) throws Exception
             {
+                DispositionActionDefinition actionDefinition = testRM386DispositionSchedule.getDispositionActionDefinitionByName("cutoff");
+                assertNotNull(actionDefinition);
+                assertTrue(nodeService.hasAspect(actionDefinition.getNodeRef(), ASPECT_UNPUBLISHED_UPDATE));
+
                 // Publish the updates
                 PublishUpdatesJobExecuter updater = (PublishUpdatesJobExecuter)applicationContext.getBean("publishUpdatesJobExecuter");
                 updater.executeImpl();
+
+                assertFalse(nodeService.hasAspect(actionDefinition.getNodeRef(), ASPECT_UNPUBLISHED_UPDATE));
 
                 // Check the record has been updated
                 DispositionAction dispositionAction = dispositionService.getNextDispositionAction(testRM386Record);