Merged V4.0-BUG-FIX to HEAD

33935: ALF-12854: SOLR - requires more detailed tracking information
   - Part 1: info reported for the SOLR SUMMARY action
   33936: ALF-11693: Treat logging.properties the same way as log4j.properties and excluded it from the core .jar
   33945: ALF-12867: RIP jmx-dumper.jar
   33946: Fix minor issue introduced in r33920
   33947: IT-6847 "Error displays on recent 4.0 TS upgrade while navigate to a document details page"
   - errors spotted on folder details & in document library list also resolved by this fix
   33951: ALF-12867: Fixed typo
   33955: Some unit tests and examples on how to implement file wiping or shredding
    - One API change: On EagerContentStoreCleaner, the following is now protected:
         protected boolean deleteFromStore(String contentUrl, ContentStore store)
    - Examples in unit test ContentStoreCleanerTest: testForcedImmediateShredding and testShreddingCleanup
   33962: RIP hyperic plugin
   33965: ALF-12697: Asynchronously get RSS items for dashlets on page load
   33969: Fix for ALF-12307 Solr backup fills the disk
   - number to Keep can be set by property/subsystem/Share admin
   - set default backup location
   33970: Fix for ALF-12854 SOLR - requires more detailed tracking information
   Part 2: basic stats added to JMX and improved statistical moment calculation
   33984: Update 4.0.1 installers to use Tomcat 6.0.35
   - Remembered to set useHttpOnly on <Context>
   - Also commented out JreMemoryLeakPreventionListener
   33988: ALF-12717	CIFS: Unfriendly message occurs when a user with consumer/contributor role tries to rename space.
   33997: ALF-12697: Remove async WebScript from dashlet family
   33999: Fix for ALF-12854 SOLR - requires more detailed tracking information
   - Final part - Expose via share admin
   34005: Fix for ALF-12708
   34007: Merged V3.4-BUG-FIX (3.4.9) to 3.4-BUG-FIX (4.0.1)
      34006: Merged V3.4 (3.4.8) to V3.4-BUG-FIX (3.4.9)
         34002: ALF-12898: MMT does not calculate the current version number correctly and does not report errors
            - ModuleManagementTool was constructing the wrong current war file version number.
              The minor and revision values were the wrong way around. 3.4.8 became 3.8.4
            - Modified unit test to use the actual version number in the test war 4.1.0 rather than 4.0.1
            - Modified the ModuleManagementTool so that problems would be reported to the user even if they did not have -verbose set
   34016: Update version.revision for 4.0.1
   34022: Merged V3.4-BUG-FIX to V4.0-BUG-FIX
      33952: ALF-5680: It is possible to cut/paste a locked file   
      34003: Merged DEV to V3.4-BUG-FIX
         34001: ALF-12709: CLONE - Run action Wizard. Can't run action.
            Fixed regression connected with the separator of the AVM path in the 'NodeRef' id, which had been changed from ';' to '|'.
   34023: Merged V3.4-BUG-FIX to V4.0-BUG-FIX (RECORD ONLY)
      33912: ALF-9899: Merged HEAD to V3.4-BUG-FIX (PARTIAL)
         31912: Merged DEV to HEAD
            31823: TransactionalCache provides REPEATABLE READ
                   - Values found in shared cache are placed into transactional cache
                   - Previously, values could keep changing until first write (READ COMMITTED)
                     but now the first read sets the value until it is changed by the current
                     transaction   
      33981: Updated version.revision for 3.4.9
      33985: ALF-12364: Merged V4.0-BUG-FIX to V3.4-BUG-FIX
         33984: Update 3.4.9 installers to use Tomcat 6.0.35
            - Remembered to set useHttpOnly on <Context>
            - Also commented out JreMemoryLeakPreventionListener


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@34024 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Dave Ward
2012-02-18 12:12:01 +00:00
parent 42f24b1c6a
commit 6f7095f450
17 changed files with 511 additions and 46 deletions

View File

@@ -37,6 +37,9 @@
<property name="remoteBackupLocation">
<value>${solr.backup.alfresco.remoteBackupLocation}</value>
</property>
<property name="numberToKeep">
<value>${solr.backup.alfresco.numberToKeep}</value>
</property>
<property name="core">
<value>alfresco</value>
</property>
@@ -79,6 +82,9 @@
</property>
<property name="remoteBackupLocation">
<value>${solr.backup.archive.remoteBackupLocation}</value>
</property>
<property name="numberToKeep">
<value>${solr.backup.archive.numberToKeep}</value>
</property>
<property name="core">
<value>archive</value>

View File

@@ -1,5 +1,8 @@
solr.backup.alfresco.cronExpression=0 0 2 * * ?
solr.backup.archive.cronExpression=0 0 4 * * ?
solr.backup.alfresco.remoteBackupLocation=
solr.backup.archive.remoteBackupLocation=
solr.backup.alfresco.remoteBackupLocation=${dir.root}/solrBackup/alfresco
solr.backup.archive.remoteBackupLocation=${dir.root}/solrBackup/archive
solr.backup.alfresco.numberToKeep=3
solr.backup.archive.numberToKeep=3

View File

@@ -6,7 +6,7 @@
version.major=4
version.minor=1
version.revision=0
version.revision=1
version.label=
# Edition label

View File

@@ -633,6 +633,7 @@ public class CifsHelper
/**
* Move a node
* @deprecated - not used by live code - exception handling is too severe
*
* @param nodeToMoveRef Node to be moved
* @param newParentNodeRef New parent folder node
@@ -661,6 +662,7 @@ public class CifsHelper
/**
* Rename a node
* @deprecated - not used by live code - exception handling is too severe
*
* @param nodeToRenameRef Node to be renamed
* @param newName New name for the node

View File

@@ -49,6 +49,7 @@ import org.alfresco.jlan.server.filesys.DiskFullException;
import org.alfresco.jlan.server.filesys.DiskInterface;
import org.alfresco.jlan.server.filesys.DiskSizeInterface;
import org.alfresco.jlan.server.filesys.FileAttribute;
import org.alfresco.jlan.server.filesys.FileExistsException;
import org.alfresco.jlan.server.filesys.FileInfo;
import org.alfresco.jlan.server.filesys.FileName;
import org.alfresco.jlan.server.filesys.FileOpenParams;
@@ -56,6 +57,7 @@ import org.alfresco.jlan.server.filesys.FileStatus;
import org.alfresco.jlan.server.filesys.IOControlNotImplementedException;
import org.alfresco.jlan.server.filesys.IOCtlInterface;
import org.alfresco.jlan.server.filesys.NetworkFile;
import org.alfresco.jlan.server.filesys.PermissionDeniedException;
import org.alfresco.jlan.server.filesys.SearchContext;
import org.alfresco.jlan.server.filesys.SrvDiskInfo;
import org.alfresco.jlan.server.filesys.TreeConnection;
@@ -1338,11 +1340,12 @@ public class ContentDiskDriver2 extends AlfrescoDiskDriver implements ExtendedD
// Rename or move the file/folder to another folder
if (sameFolder == true)
{
getCifsHelper().rename(nodeToMoveRef, name);
fileFolderService.rename(nodeToMoveRef, name);
}
else
{
getCifsHelper().move(nodeToMoveRef, sourceFolderRef, targetFolderRef, name);
fileFolderService.moveFrom(nodeToMoveRef, sourceFolderRef, targetFolderRef, name);
}
if (logger.isDebugEnabled())
@@ -1354,10 +1357,7 @@ public class ContentDiskDriver2 extends AlfrescoDiskDriver implements ExtendedD
else
{
// Rename a file within the same folder
//
// Check if the target file already exists
final int newExists = fileExists(session, tree, newName);
if (logger.isDebugEnabled())
{
logger.debug(
@@ -1369,39 +1369,54 @@ public class ContentDiskDriver2 extends AlfrescoDiskDriver implements ExtendedD
" Node: " + nodeToMoveRef + "\n" +
" Aspects: " + nodeService.getAspects(nodeToMoveRef));
}
getCifsHelper().rename(nodeToMoveRef, name);
fileFolderService.rename(nodeToMoveRef, name);
}
}
catch (org.alfresco.service.cmr.model.FileNotFoundException e)
{
if (logger.isDebugEnabled())
{
logger.debug("Rename file - about to throw file not exists exception file:" + oldName, e);
}
throw new java.io.FileNotFoundException("renameFile: file not found file: + oldName");
}
catch (org.alfresco.service.cmr.model.FileExistsException e)
{
if (logger.isDebugEnabled())
{
logger.debug("Rename file - about to throw file exists exception", e);
}
throw new org.alfresco.jlan.server.filesys.FileExistsException(newName);
}
catch (org.alfresco.repo.security.permissions.AccessDeniedException ex)
{
if (logger.isDebugEnabled())
{
logger.debug("Rename file - access denied, " + oldName);
logger.debug("Rename file - about to throw permissions denied exception", ex);
}
// Convert to a filesystem access denied status
throw new AccessDeniedException("Rename file " + oldName);
throw new org.alfresco.jlan.server.filesys.PermissionDeniedException("renameFile: No permissions to rename file:" + oldName);
}
catch (NodeLockedException ex)
{
if (logger.isDebugEnabled())
{
logger.debug("Rename file", ex);
logger.debug("Rename file - about to throw access denied exception", ex);
}
// Convert to an filesystem access denied exception
throw new AccessDeniedException("Node locked " + oldName);
throw new AccessDeniedException("renameFile: Access Denied - Node locked file:" + oldName);
}
catch (AlfrescoRuntimeException ex)
{
if (logger.isDebugEnabled())
{
logger.debug("Rename file", ex);
logger.debug("Rename file about to throw access denied exception", ex);
}
throw new AlfrescoRuntimeException("renameFile failed: \n" +
" Old name: " + oldName + "\n" +
" New name: " + newName + "\n" +
ex);
// Convert to a general I/O exception
throw new AccessDeniedException("Rename file " + oldName);
}
}

View File

@@ -370,21 +370,142 @@ public class ContentStoreCleanerTest extends TestCase
assertTrue("Content listener was not called", deletedUrls.contains(reader.getContentUrl()));
}
public void testProtectedRemoval() throws Exception
/**
* Test forced and immediate shredding of content
* <p/>
* There is no validation that the file is affected. It's an example of how to wire
* different listeners together to do neat things to the files before deletion.
*/
public void testForcedImmediateShredding() throws Exception
{
cleaner.setProtectDays(1);
// add some content to the store
ContentWriter writer = store.getWriter(ContentStore.NEW_CONTENT_CONTEXT);
writer.putContent("ABC");
String contentUrl = writer.getContentUrl();
// An example of an eager cleaner that will wipe files before deleting their contents
// This is very much like a listener, but listeners are only called by the standard,
// scheduled cleaner.
final Set<String> wipedUrls = new HashSet<String>(3);
final EagerContentStoreCleaner wipingEagerCleaner = new EagerContentStoreCleaner()
{
final FileWipingContentCleanerListener fileWiper = new FileWipingContentCleanerListener();
@Override
protected boolean deleteFromStore(String contentUrl, ContentStore store)
{
fileWiper.beforeDelete(store, contentUrl);
wipedUrls.add(contentUrl);
return true;
}
};
wipingEagerCleaner.setStores(Collections.singletonList(store));
/*
* Note that we don't need to wire the 'wipingEagerCleaner' into anything.
* You can if you want it to wipe for all use cases. In this case, we're just
* going to manually force it to clean.
*/
// fire the cleaner
// Create a node with content
final StoreRef storeRef = nodeService.createStore("test", getName() + "-" + GUID.generate());
RetryingTransactionCallback<NodeRef> testCallback = new RetryingTransactionCallback<NodeRef>()
{
public NodeRef execute() throws Throwable
{
// Create some content
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
Map<QName, Serializable> properties = new HashMap<QName, Serializable>(13);
properties.put(ContentModel.PROP_NAME, (Serializable)"test.txt");
NodeRef contentNodeRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
ContentModel.ASSOC_CHILDREN,
ContentModel.TYPE_CONTENT,
properties).getChildRef();
ContentWriter writer = contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
writer.putContent("INITIAL CONTENT");
// Done
return contentNodeRef;
}
};
final NodeRef contentNodeRef = transactionService.getRetryingTransactionHelper().doInTransaction(testCallback);
// Now, force the node to be deleted and make sure it gets cleaned up directly
// This can be used where some sensitive data has been identified and, before deletion,
// the URL can be marked for immediate cleanup (in the post-commit phase, of course!)
RetryingTransactionCallback<String> deleteCallback = new RetryingTransactionCallback<String>()
{
public String execute() throws Throwable
{
// Let's pretend we're in 'beforeDeleteNode'
ContentReader reader = contentService.getReader(contentNodeRef, ContentModel.PROP_CONTENT);
String contentUrl = reader.getContentUrl();
wipingEagerCleaner.registerOrphanedContentUrl(contentUrl, true);
nodeService.deleteNode(contentNodeRef);
// Done
return contentUrl;
}
};
String contentUrl = transactionService.getRetryingTransactionHelper().doInTransaction(deleteCallback);
// So, we don't fire the cleaner, but notice that the eager cleaner has 'wiped' the content
assertTrue("Expected our URL to have been wiped.", wipedUrls.contains(contentUrl));
cleaner.execute();
// the content should have disappeared as it is not in the database
assertTrue("Protected content was deleted", store.exists(contentUrl));
assertFalse("Content listener was called with deletion of protected URL", deletedUrls.contains(contentUrl));
}
/**
* Test basic wiping of file contents on normal orphan cleanup
*/
public void testShreddingCleanup() throws Exception
{
eagerCleaner.setEagerOrphanCleanup(false);
cleaner.setProtectDays(0);
// Add in a the Wiping cleaner listener
FileWipingContentCleanerListener fileWiper = new FileWipingContentCleanerListener();
List<ContentStoreCleanerListener> listeners = new ArrayList<ContentStoreCleanerListener>(1);
listeners.add(fileWiper);
eagerCleaner.setListeners(listeners);
// Create a node with content
final StoreRef storeRef = nodeService.createStore("test", getName() + "-" + GUID.generate());
RetryingTransactionCallback<NodeRef> testCallback = new RetryingTransactionCallback<NodeRef>()
{
public NodeRef execute() throws Throwable
{
// Create some content
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
Map<QName, Serializable> properties = new HashMap<QName, Serializable>(13);
properties.put(ContentModel.PROP_NAME, (Serializable)"test.txt");
NodeRef contentNodeRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
ContentModel.ASSOC_CHILDREN,
ContentModel.TYPE_CONTENT,
properties).getChildRef();
ContentWriter writer = contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
writer.putContent("INITIAL CONTENT");
// Done
return contentNodeRef;
}
};
final NodeRef contentNodeRef = transactionService.getRetryingTransactionHelper().doInTransaction(testCallback);
// Simple delete
RetryingTransactionCallback<Void> deleteCallback = new RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
nodeService.deleteNode(contentNodeRef);
// Done
return null;
}
};
transactionService.getRetryingTransactionHelper().doInTransaction(deleteCallback);
// It's orphaned now. Fire the cleaner.
cleaner.execute();
}
private class DummyCleanerListener implements ContentStoreCleanerListener
{
public void beforeDelete(ContentStore store, String contentUrl) throws ContentIOException

View File

@@ -143,6 +143,8 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
* <p/>
* <b>NB: </b>Any content registered <u>will</u> be deleted if the current transaction
* commits and if 'eager' cleanup is turned on.
* <p/>
* Note that listeners are not called for this process.
*
* @return Returns <tt>true</tt> if the content was scheduled for post-transaction deletion.
* If the return value is <tt>true</tt> then the calling code <b>must</b> delete
@@ -157,7 +159,9 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
* Queues orphaned content for post-transaction removal
* <p/>
* <b>NB: </b>Any content registered <u>will</u> be deleted if the current transaction
* commits and if 'eager' cleanup is turned on.
* commits and if 'eager' cleanup is turned on OR if 'force' is <tt>true</tt>.
* <p/>
* Note that listeners are not called for this process.
*
* @param force <tt>true</tt> for force the post-commit URL deletion
* regardless of the setting {@link #setEagerOrphanCleanup(boolean)}.
@@ -246,6 +250,8 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
/**
* Delete the content URL from all stores
* <p/>
* Note that listeners <b>are</b> called for this process.
*
* @param contentUrl the URL to delete
* @return Returns <tt>true</tt> if all deletes were successful
@@ -298,7 +304,7 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
/**
* Attempts to delete the URL from the store, catching and reporing errors.
*/
private boolean deleteFromStore(String contentUrl, ContentStore store)
protected boolean deleteFromStore(String contentUrl, ContentStore store)
{
try
{

View File

@@ -0,0 +1,144 @@
/*
* Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.content.cleanup;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Simple listener that overwrites files with zeros.
* <p/>
* Wire this into the {@link EagerContentStoreCleaner} as a listener and it will
* ensure that files have their contents overwritten with zeros before deletion.
* Note that this process does not affect the content lifecycle in any way
* i.e. content will still follow the same orphan path as before.
* <p>
* Clearly wiring this up with a {@link DeletedContentBackupCleanerListener} is
* pointless as you will be making a copy of the file before wiping it or end up
* copying a file full of zeros depending on the order of the listeners.
*
* @author Derek Hulley
* @since 4.0.1
*/
public class FileWipingContentCleanerListener implements ContentStoreCleanerListener
{
    private static Log logger = LogFactory.getLog(FileWipingContentCleanerListener.class);

    public FileWipingContentCleanerListener()
    {
    }

    /**
     * Shreds the content before the store removes it.
     * <p/>
     * If the content no longer exists, or if shredding fails, the problem is
     * logged and the deletion is allowed to proceed; this listener never
     * blocks the cleanup itself.
     *
     * @param sourceStore the store from which the content will be deleted
     * @param contentUrl the URL of the content about to be deleted
     */
    public void beforeDelete(ContentStore sourceStore, String contentUrl) throws ContentIOException
    {
        // First check if the content is present at all
        ContentReader reader = sourceStore.getReader(contentUrl);
        if (reader != null && reader.exists())
        {
            // Call to implementation's shred
            if (logger.isDebugEnabled())
            {
                logger.debug(
                        "About to shread: \n" +
                        " URL: " + contentUrl + "\n" +
                        " Source: " + sourceStore);
            }
            try
            {
                shred(reader);
            }
            catch (Throwable e)
            {
                // A failed shred must not prevent the deletion from going ahead
                logger.error(
                        "Content shredding failed: \n" +
                        " URL: " + contentUrl + "\n" +
                        " Source: " + sourceStore + "\n" +
                        " Reader: " + reader,
                        e);
            }
        }
        else
        {
            logger.error(
                    "Content no longer exists. Unable to shred: \n" +
                    " URL: " + contentUrl + "\n" +
                    " Source: " + sourceStore);
        }
    }

    /**
     * Override to perform shredding on disparate forms of readers. This implementation will,
     * by default, identify more specific readers and make calls for those.
     * Readers that are not recognized are left untouched.
     *
     * @param reader the reader to the content needing shredding
     * @exception IOException any IO error
     */
    protected void shred(ContentReader reader) throws IOException
    {
        if (reader instanceof FileContentReader)
        {
            FileContentReader fileReader = (FileContentReader) reader;
            File file = fileReader.getFile();
            shred(file);
        }
    }

    /**
     * Called by {@link #shred(ContentReader)} when the reader points to a physical file.
     * The default implementation simply overwrites the content with zeros.
     *
     * @param file the file to shred before deletion
     * @throws IOException any IO error
     */
    protected void shred(File file) throws IOException
    {
        // Double check
        if (!file.exists() || !file.canWrite())
        {
            throw new ContentIOException("Unable to write to file: " + file);
        }
        long bytes = file.length();
        OutputStream os = new BufferedOutputStream(new FileOutputStream(file));
        try
        {
            /*
             * There are many more efficient ways of writing bytes into the file.
             * However, it is likely that implementations will do a lot more than
             * just overwrite with zeros.
             */
            // Use a long counter: an int would overflow for files >= 2GiB,
            // leaving the file only partially (or never correctly) wiped.
            for (long i = 0; i < bytes; i++)
            {
                os.write(0);
            }
        }
        finally
        {
            // Best-effort close; failures here are irrelevant to the wipe
            try {os.close(); } catch (Throwable e) {}
        }
    }
}

View File

@@ -348,4 +348,46 @@ public class LockBehaviourImplTest extends BaseSpringTest
// TODO check that delete is also working
}
/**
 * ALF-5680: It is possible to cut/paste a locked file
 * <p/>
 * Verifies that a user who does not hold the lock cannot move a locked node:
 * the move must fail with {@link NodeLockedException}.
 */
public void testCannotMoveNodeWhenLocked()
{
    // Act as the lock owner to set the scenario up
    TestWithUserUtils.authenticateUser(GOOD_USER_NAME, PWD, rootNodeRef, this.authenticationService);
    // Create the node that we'll try to move
    NodeRef parentNode = this.nodeRef;
    ChildAssociationRef childAssocRef = nodeService.createNode(
            parentNode,
            ContentModel.ASSOC_CONTAINS,
            QName.createQName("{test}nodeServiceLockTest"),
            ContentModel.TYPE_CONTENT);
    NodeRef nodeRef = childAssocRef.getChildRef();
    // Lock it - so that it can't be moved.
    this.lockService.lock(nodeRef, LockType.WRITE_LOCK);
    // Create the new container that we'll move the node to.
    NodeRef newParentRef = nodeService.createNode(
            parentNode,
            ContentModel.ASSOC_CONTAINS,
            QName.createQName("{test}nodeServiceLockTest"),
            ContentModel.TYPE_CONTAINER).getChildRef();
    // Now the bad user will try to move the node.
    TestWithUserUtils.authenticateUser(BAD_USER_NAME, PWD, rootNodeRef, this.authenticationService);
    try
    {
        nodeService.moveNode(
                nodeRef,
                newParentRef,
                ContentModel.ASSOC_CONTAINS,
                QName.createQName("{test}nodeServiceLockTest"));
        fail("Shouldn't have been able to move locked node.");
    }
    catch (NodeLockedException e)
    {
        // Good, we can't move it - as expected.
    }
}
}

View File

@@ -67,6 +67,7 @@ public class LockServiceImpl implements LockService,
NodeServicePolicies.OnCreateChildAssociationPolicy,
NodeServicePolicies.BeforeUpdateNodePolicy,
NodeServicePolicies.BeforeDeleteNodePolicy,
NodeServicePolicies.OnMoveNodePolicy,
CopyServicePolicies.OnCopyNodePolicy,
VersionServicePolicies.BeforeCreateVersionPolicy,
VersionServicePolicies.OnCreateVersionPolicy
@@ -124,6 +125,10 @@ public class LockServiceImpl implements LockService,
NodeServicePolicies.BeforeDeleteNodePolicy.QNAME,
ContentModel.ASPECT_LOCKABLE,
new JavaBehaviour(this, "beforeDeleteNode"));
this.policyComponent.bindClassBehaviour(
NodeServicePolicies.OnMoveNodePolicy.QNAME,
ContentModel.ASPECT_LOCKABLE,
new JavaBehaviour(this, "onMoveNode"));
// Register copy class behaviour
this.policyComponent.bindClassBehaviour(
@@ -637,4 +642,11 @@ public class LockServiceImpl implements LockService,
"\" +@\\{http\\://www.alfresco.org/model/content/1.0\\}" + ContentModel.PROP_LOCK_OWNER.getLocalName() + ":\"" + getUserName() + "\"" +
" +@\\{http\\://www.alfresco.org/model/content/1.0\\}" + ContentModel.PROP_LOCK_TYPE.getLocalName() + ":\"" + lockType.toString() + "\"");
}
/**
 * Guards against moving a locked node (ALF-5680: it was possible to
 * cut/paste a locked file). Delegates to {@code checkForLock}, which
 * throws if the current user may not change the source node.
 *
 * @param oldChildAssocRef the association the node is being moved from;
 *        its child is the node being moved
 * @param newChildAssocRef the association the node is being moved to (unused here)
 */
@Override
public void onMoveNode(ChildAssociationRef oldChildAssocRef, ChildAssociationRef newChildAssocRef)
{
    NodeRef nodeRef = oldChildAssocRef.getChildRef();
    checkForLock(nodeRef);
}
}

View File

@@ -718,28 +718,54 @@ public class ModuleManagementTool implements LogOutput
}
/**
* Outputs a message the console (in verbose mode) and the logger.
* Outputs a message the console (in verbose mode).
*
* @param message the message to output
*/
private void outputMessage(String message)
{
outputMessage(message, false);
outputMessage(message, false, false);
}
/**
* Outputs a message the console (in verbose mode) and the logger.
* Outputs a message the console (in verbose mode).
*
* @param message the message to output
* @prarm indent indicates that the message should be formated with an indent
*/
private void outputErrorMessage(String message)
{
outputMessage(message, false, true);
}
/**
* Outputs a message the console (in verbose mode).
*
* @param message the message to output
* @param indent indicates that the message should be formated with an indent
*/
private void outputMessage(String message, boolean indent)
{
outputMessage(message, indent, false);
}
/**
* Outputs a message the console. Errors are always output, but others are only output in verbose mode.
*
* @param message the message to output
* @param indent indicates that the message should be formated with an indent
* @param error indicates that the message is an error.
*/
private void outputMessage(String message, boolean indent, boolean error)
{
if (indent == true)
{
message = " - " + message;
}
if (this.verbose == true)
if (error)
{
System.err.println(message);
}
else if (this.verbose == true)
{
System.out.println(message);
}
@@ -857,7 +883,7 @@ public class ModuleManagementTool implements LogOutput
catch (ModuleManagementToolException e)
{
// These are user-friendly
manager.outputMessage(e.getMessage());
manager.outputErrorMessage(e.getMessage());
outputUsage();
System.exit(ERROR_EXIT_CODE);
}

View File

@@ -259,7 +259,7 @@ public class ModuleManagementToolTest extends TestCase
{
manager.setVerbose(true);
String warLocation = getFileLocation(".war", "module/test.war");
String warLocation = getFileLocation(".war", "module/test.war"); //Version 4.0.1
String ampLocation = getFileLocation(".amp", "module/test_v4.amp");
try

View File

@@ -40,7 +40,7 @@ public class WarHelperImpl implements WarHelper
if (propsFile != null && propsFile.exists())
{
Properties warVers = loadProperties(propsFile);
VersionNumber warVersion = new VersionNumber(warVers.getProperty("version.major")+"."+warVers.getProperty("version.revision")+"."+warVers.getProperty("version.minor"));
VersionNumber warVersion = new VersionNumber(warVers.getProperty("version.major")+"."+warVers.getProperty("version.minor")+"."+warVers.getProperty("version.revision"));
if(warVersion.compareTo(installingModuleDetails.getRepoVersionMin())==-1) {
throw new ModuleManagementToolException("The module ("+installingModuleDetails.getTitle()+") must be installed on a repo version greater than "+installingModuleDetails.getRepoVersionMin());
}

View File

@@ -48,6 +48,8 @@ public class SolrBackupClient implements InitializingBean
private String remoteBackupLocation;
private int numberToKeep;
private String core;
private SOLRAdminClient solrAdminClient;
@@ -73,6 +75,14 @@ public class SolrBackupClient implements InitializingBean
this.remoteBackupLocation = remoteBackupLocation;
}
/**
* @param numberToKeep the numberToKeep to set
*/
public void setNumberToKeep(int numberToKeep)
{
this.numberToKeep = numberToKeep;
}
public void execute()
{
@@ -132,6 +142,7 @@ public class SolrBackupClient implements InitializingBean
params.set("qt", "/"+core+"/replication");
params.set("command", "backup");
params.set("location", remoteBackupLocation);
params.set("numberToKeep", numberToKeep);
QueryResponse response = solrAdminClient.query(params);

View File

@@ -44,20 +44,46 @@ public class SolrChildApplicationContextFactory extends ChildApplicationContextF
private static String ALFRESCO_LAG_DURATION = "tracker.alfresco.lag.duration";
private static String ALFRESCO_LAST_INDEXED_TXN = "tracker.alfresco.last.indexed.txn";
private static String ALFRESCO_APPROX_TXNS_REMAINING = "tracker.alfresco.approx.txns.remaining";
private static String ALFRESCO_APPROX_INDEXING_TIME_REMAINING = "tracker.alfresco.approx.indexing.time.remaining";
private static String ARCHIVE_ACTIVE = "tracker.archive.active";
private static String ARCHIVE_LAG = "tracker.archive.lag";
private static String ARCHIVE_LAG_DURATION = "tracker.archive.lag.duration";
private static String ARCHIVE_LAST_INDEXED_TXN = "tracker.archive.last.indexed.txn";
private static String ARCHIVE_APPROX_TXNS_REMAINING = "tracker.archive.approx.txns.remaining";
private static String ARCHIVE_APPROX_INDEXING_TIME_REMAINING = "tracker.archive.approx.indexing.time.remaining";
@Override
public boolean isUpdateable(String name)
{
// TODO Auto-generated method stub
return super.isUpdateable(name)
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_ACTIVE) && !name.equals(SolrChildApplicationContextFactory.ALFRESCO_LAG)
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_LAG_DURATION) && !name.equals(SolrChildApplicationContextFactory.ARCHIVE_ACTIVE)
&& !name.equals(SolrChildApplicationContextFactory.ARCHIVE_LAG) && !name.equals(SolrChildApplicationContextFactory.ARCHIVE_LAG_DURATION);
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_ACTIVE)
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_LAG)
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_LAG_DURATION)
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_LAST_INDEXED_TXN)
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_APPROX_TXNS_REMAINING)
&& !name.equals(SolrChildApplicationContextFactory.ALFRESCO_APPROX_INDEXING_TIME_REMAINING)
&& !name.equals(SolrChildApplicationContextFactory.ARCHIVE_ACTIVE)
&& !name.equals(SolrChildApplicationContextFactory.ARCHIVE_LAG)
&& !name.equals(SolrChildApplicationContextFactory.ARCHIVE_LAG_DURATION)
&& !name.equals(SolrChildApplicationContextFactory.ARCHIVE_APPROX_TXNS_REMAINING)
&& !name.equals(SolrChildApplicationContextFactory.ARCHIVE_APPROX_INDEXING_TIME_REMAINING)
&& !name.equals(SolrChildApplicationContextFactory.ARCHIVE_LAST_INDEXED_TXN)
;
}
@Override
@@ -81,12 +107,18 @@ public class SolrChildApplicationContextFactory extends ChildApplicationContextF
String alfrescoLag = alfresco.getString("Lag");
String alfrescoActive = alfresco.getString("Active");
String alfrescoDuration = alfresco.getString("Duration");
String alfrescoLastIndexedTxn = alfresco.getString("Id for last TX in index");
String alfrescoApproxTxnsReminaing = alfresco.getString("Approx transactions remaining");
String alfrescoApproxIndexingTimeReminaing = alfresco.getString("Approx indexing time remaining");
JSONObject archive = summary.getJSONObject("archive");
String archiveLag = archive.getString("Lag");
String archiveActive = archive.getString("Active");
String archiveDuration = archive.getString("Duration");
String archiveLastIndexedTxn = archive.getString("Id for last TX in index");
String archiveApproxTxnsReminaing = archive.getString("Approx transactions remaining");
String archiveApproxIndexingTimeReminaing = archive.getString("Approx indexing time remaining");
if (name.equals(SolrChildApplicationContextFactory.ALFRESCO_ACTIVE))
{
@@ -100,6 +132,18 @@ public class SolrChildApplicationContextFactory extends ChildApplicationContextF
{
return alfrescoDuration;
}
else if (name.equals(SolrChildApplicationContextFactory.ALFRESCO_LAST_INDEXED_TXN))
{
return alfrescoLastIndexedTxn;
}
else if (name.equals(SolrChildApplicationContextFactory.ALFRESCO_APPROX_TXNS_REMAINING))
{
return alfrescoApproxTxnsReminaing;
}
else if (name.equals(SolrChildApplicationContextFactory.ALFRESCO_APPROX_INDEXING_TIME_REMAINING))
{
return alfrescoApproxIndexingTimeReminaing;
}
else if (name.equals(SolrChildApplicationContextFactory.ARCHIVE_ACTIVE))
{
return archiveActive;
@@ -112,6 +156,18 @@ public class SolrChildApplicationContextFactory extends ChildApplicationContextF
{
return archiveDuration;
}
else if (name.equals(SolrChildApplicationContextFactory.ARCHIVE_LAST_INDEXED_TXN))
{
return archiveLastIndexedTxn;
}
else if (name.equals(SolrChildApplicationContextFactory.ARCHIVE_APPROX_TXNS_REMAINING))
{
return archiveApproxTxnsReminaing;
}
else if (name.equals(SolrChildApplicationContextFactory.ARCHIVE_APPROX_INDEXING_TIME_REMAINING))
{
return archiveApproxIndexingTimeReminaing;
}
else
{
return "Unavailable";
@@ -139,9 +195,16 @@ public class SolrChildApplicationContextFactory extends ChildApplicationContextF
result.add(SolrChildApplicationContextFactory.ALFRESCO_ACTIVE);
result.add(SolrChildApplicationContextFactory.ALFRESCO_LAG);
result.add(SolrChildApplicationContextFactory.ALFRESCO_LAG_DURATION);
result.add(SolrChildApplicationContextFactory.ALFRESCO_LAST_INDEXED_TXN);
result.add(SolrChildApplicationContextFactory.ALFRESCO_APPROX_TXNS_REMAINING);
result.add(SolrChildApplicationContextFactory.ALFRESCO_APPROX_INDEXING_TIME_REMAINING);
result.add(SolrChildApplicationContextFactory.ARCHIVE_ACTIVE);
result.add(SolrChildApplicationContextFactory.ARCHIVE_LAG);
result.add(SolrChildApplicationContextFactory.ARCHIVE_LAG_DURATION);
result.add(SolrChildApplicationContextFactory.ARCHIVE_LAST_INDEXED_TXN);
result.add(SolrChildApplicationContextFactory.ARCHIVE_APPROX_TXNS_REMAINING);
result.add(SolrChildApplicationContextFactory.ARCHIVE_APPROX_INDEXING_TIME_REMAINING);
result.addAll(super.getPropertyNames());
return result;
}

View File

@@ -151,4 +151,10 @@ public interface SOLRTrackingComponent
* @return
*/
public Long getMaxTxnCommitTime();
/**
* Get the last transaction id from the repo
* @return
*/
public Long getMaxTxnId();
}

View File

@@ -886,4 +886,12 @@ public class SOLRTrackingComponentImpl implements SOLRTrackingComponent
nodeDAO.setCheckNodeConsistency();
return nodeDAO.getMaxTxnCommitTime();
}
/**
 * {@inheritDoc}
 * <p/>
 * Queries for the highest transaction id committed up to "now". A 1ms pad is
 * added to the current time so transactions committed in the current
 * millisecond are included in the result.
 */
@Override
public Long getMaxTxnId()
{
    long maxCommitTime = System.currentTimeMillis()+1L;
    // Mirror getMaxTxnCommitTime above: ask the DAO to check node consistency first
    nodeDAO.setCheckNodeConsistency();
    return nodeDAO.getMaxTxnIdByCommitTime(maxCommitTime);
}
}