diff --git a/config/alfresco/content-services-context.xml b/config/alfresco/content-services-context.xml
index 2700d5f0d3..57634868a1 100644
--- a/config/alfresco/content-services-context.xml
+++ b/config/alfresco/content-services-context.xml
@@ -87,6 +87,8 @@
+
+
@@ -110,10 +112,15 @@
+
-
+
+
+
+
+
@@ -180,9 +187,15 @@
parent="baseContentTransformer" />
+ class="org.alfresco.repo.content.transform.OpenOfficeContentTransformer"
+ parent="baseContentTransformer" >
+
+
+
+
+ classpath:alfresco/mimetype/openoffice-document-formats.xml
+
+ imconvert "${source}" ${options} "${target}"
- imconvert ${source} ${options} ${target}
+ convert ${source} ${options} ${target}
diff --git a/config/alfresco/core-services-context.xml b/config/alfresco/core-services-context.xml
index fc2743c1ba..b9d5061865 100644
--- a/config/alfresco/core-services-context.xml
+++ b/config/alfresco/core-services-context.xml
@@ -76,6 +76,7 @@
alfresco.messages.template-service
alfresco.messages.lock-service
alfresco.messages.patch-service
+ alfresco.messages.webdav-messages
@@ -185,7 +186,7 @@
+ class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2">
@@ -348,15 +349,12 @@
-
-
+
+
-
-
-
@@ -482,6 +480,12 @@
+
+
+
+
+
+
@@ -562,7 +566,7 @@
+ class="org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory2$LuceneIndexBackupComponent">
diff --git a/config/alfresco/ehcache-default.xml b/config/alfresco/ehcache-default.xml
index be8b13c6b0..0ed693e163 100644
--- a/config/alfresco/ehcache-default.xml
+++ b/config/alfresco/ehcache-default.xml
@@ -49,7 +49,7 @@
timeToLiveSeconds="0"
overflowToDisk="false"
/>
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${dir.indexes}
+
+
+
+
+
+
+
+
+ ${lucene.maxAtomicTransformationTime}
+
+
+ ${lucene.query.maxClauses}
+
+
+ ${lucene.indexer.batchSize}
+
+
+ ${lucene.indexer.minMergeDocs}
+
+
+ ${lucene.indexer.mergeFactor}
+
+
+ ${lucene.indexer.maxMergeDocs}
+
+
+ ${dir.indexes.lock}
+
+
+ ${lucene.indexer.maxFieldLength}
+
+
+ ${lucene.write.lock.timeout}
+
+
+ ${lucene.commit.lock.timeout}
+
+
+ ${lucene.lock.poll.interval}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${dir.root}/backup-lucene-indexes
+
+
+
+
\ No newline at end of file
diff --git a/config/alfresco/hibernate-context.xml b/config/alfresco/hibernate-context.xml
index fc2befda7b..12564b8c0a 100644
--- a/config/alfresco/hibernate-context.xml
+++ b/config/alfresco/hibernate-context.xml
@@ -32,6 +32,10 @@
+
+
+
+
org/alfresco/repo/domain/hibernate/Node.hbm.xml
org/alfresco/repo/domain/hibernate/Store.hbm.xml
org/alfresco/repo/domain/hibernate/VersionCount.hbm.xml
diff --git a/config/alfresco/messages/patch-service.properties b/config/alfresco/messages/patch-service.properties
index 64d5461e5c..17eac74a1a 100644
--- a/config/alfresco/messages/patch-service.properties
+++ b/config/alfresco/messages/patch-service.properties
@@ -69,6 +69,6 @@ patch.scriptsFolder.result.exists=The scripts folder already exists: {0}
patch.scriptsFolder.result.created=The scripts folder was successfully created: {0}
patch.topLevelGroupParentChildAssociationTypePatch.description=Ensure top level groups have the correct child association type.
-patch.topLevelGroupParentChildAssociationTypePatch.=Fixed top level groups child association type.
-
-
+patch.topLevelGroupParentChildAssociationTypePatch.result=Fixed {0} top level groups child association types.
+patch.topLevelGroupParentChildAssociationTypePatch.err.sys_path_not_found=Required authority system path not found: {0}
+patch.topLevelGroupParentChildAssociationTypePatch.err.auth_path_not_found=Required authority path not found: {0}
diff --git a/config/alfresco/messages/version-service.properties b/config/alfresco/messages/version-service.properties
index 6d7a741cfb..24462679e1 100644
--- a/config/alfresco/messages/version-service.properties
+++ b/config/alfresco/messages/version-service.properties
@@ -6,3 +6,5 @@ version_service.err_unsupported=The current implementation of the version servic
version_service.err_one_preceeding=The current implementation of the version service only supports one preceding version.
version_service.err_restore_no_version=The node {0} cannot be restored since there is no version information available for this node.
version_service.err_revert_mismatch=The version provided to revert to does not come from the node's version history.
+version_service.initial_version=Initial version
+version_service.auto_version=Auto version
diff --git a/config/alfresco/messages/webdav-messages.properties b/config/alfresco/messages/webdav-messages.properties
new file mode 100644
index 0000000000..f4903d25e0
--- /dev/null
+++ b/config/alfresco/messages/webdav-messages.properties
@@ -0,0 +1,13 @@
+# webdav HTML page messages
+
+webdav.repository_title=Alfresco Content Repository
+webdav.directory_listing=Directory listing for
+webdav.column.name=Name
+webdav.column.size=Size
+webdav.column.type=Type
+webdav.column.modifieddate=Modified Date
+webdav.column.navigate_up=Up a level
+webdav.err.dir=An error occurred whilst generating the directory listing. Please contact the system administrator.
+webdav.size.bytes=bytes
+webdav.size.kilobytes=KB
+webdav.size.megabytes=MB
diff --git a/config/alfresco/mimetype/openoffice-document-formats.xml b/config/alfresco/mimetype/openoffice-document-formats.xml
new file mode 100644
index 0000000000..62a0040d75
--- /dev/null
+++ b/config/alfresco/mimetype/openoffice-document-formats.xml
@@ -0,0 +1,154 @@
+
+
+
+
+
+ Portable Document Format
+ application/pdf
+ pdf
+
+ Presentationimpress_pdf_Export
+ Spreadsheetcalc_pdf_Export
+ Textwriter_pdf_Export
+
+
+
+ Macromedia Flash
+ application/x-shockwave-flash
+ swf
+
+ Presentationimpress_flash_Export
+
+
+
+
+ HTML
+ text/html
+ html
+
+ Presentationimpress_html_Export
+ SpreadsheetHTML (StarCalc)
+ TextHTML (StarWriter)
+
+
+
+
+
+ OpenDocument Text
+ Text
+ application/vnd.oasis.opendocument.text
+ odt
+
+ Textwriter8
+
+
+
+ OpenOffice.org 1.0 Text Document
+ Text
+ application/vnd.sun.xml.writer
+ sxw
+
+ TextStarOffice XML (Writer)
+
+
+
+ Microsoft Word
+ Text
+ application/msword
+ doc
+
+ TextMS Word 97
+
+
+
+ WordPerfect
+ Text
+ application/wordperfect
+ wpd
+
+ TextWordPerfect
+
+
+
+ Rich Text Format
+ Text
+ text/rtf
+ rtf
+
+ TextRich Text Format
+
+
+
+ Plain Text
+ Text
+ text/plain
+ txt
+
+ TextText
+
+
+
+
+
+ OpenDocument Spreadsheet
+ Spreadsheet
+ application/vnd.oasis.opendocument.spreadsheet
+ ods
+
+ Spreadsheetcalc8
+
+
+
+ OpenOffice.org 1.0 Spreadsheet
+ Spreadsheet
+ application/vnd.sun.xml.calc
+ sxc
+
+ SpreadsheetStarOffice XML (Calc)
+
+
+
+ Microsoft Excel
+ Spreadsheet
+ application/vnd.excel
+ xls
+
+ SpreadsheetMS Excel 97
+
+
+
+
+
+
+ OpenDocument Presentation
+ Presentation
+ application/vnd.oasis.opendocument.presentation
+ odp
+
+ Presentationimpress8
+
+
+
+ OpenOffice.org 1.0 Presentation
+ Presentation
+ application/vnd.sun.xml.impress
+ sxi
+
+ PresentationStarOffice XML (Impress)
+
+
+
+ Microsoft PowerPoint
+ Presentation
+ application/vnd.powerpoint
+ ppt
+
+ PresentationMS PowerPoint 97
+
+
+
+
\ No newline at end of file
diff --git a/config/alfresco/model/contentModel.xml b/config/alfresco/model/contentModel.xml
index 025bed35a3..06ea1289ff 100644
--- a/config/alfresco/model/contentModel.xml
+++ b/config/alfresco/model/contentModel.xml
@@ -230,21 +230,25 @@
Createdd:datetimetrue
+ trueCreatord:texttrue
+ trueModifiedd:datetimetrue
+ trueModifierd:texttrue
+ trueAccessed
@@ -645,6 +649,29 @@
+
+ Emailed
+
+
+ Originator
+ d:text
+
+
+ Addressee
+ d:text
+
+
+ Addressees
+ d:text
+ true
+
+
+ Sent Date
+ d:datetime
+
+
+
+
diff --git a/config/alfresco/workflow-context.xml b/config/alfresco/workflow-context.xml
new file mode 100644
index 0000000000..98abbb5311
--- /dev/null
+++ b/config/alfresco/workflow-context.xml
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/source/java/org/alfresco/filesys/ftp/FTPDataSession.java b/source/java/org/alfresco/filesys/ftp/FTPDataSession.java
index a0acf05f02..407b5cbc0f 100644
--- a/source/java/org/alfresco/filesys/ftp/FTPDataSession.java
+++ b/source/java/org/alfresco/filesys/ftp/FTPDataSession.java
@@ -19,6 +19,19 @@ package org.alfresco.filesys.ftp;
import java.net.*;
import java.io.*;
+import org.alfresco.filesys.server.SrvSession;
+import org.alfresco.filesys.server.filesys.AccessMode;
+import org.alfresco.filesys.server.filesys.DiskDeviceContext;
+import org.alfresco.filesys.server.filesys.DiskInterface;
+import org.alfresco.filesys.server.filesys.FileAction;
+import org.alfresco.filesys.server.filesys.FileOpenParams;
+import org.alfresco.filesys.server.filesys.FileStatus;
+import org.alfresco.filesys.server.filesys.NetworkFile;
+import org.alfresco.filesys.server.filesys.NotifyChange;
+import org.alfresco.filesys.server.filesys.TreeConnection;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
/**
* FTP Data Session Class
*
@@ -30,9 +43,16 @@ import java.io.*;
*
* @author GKSpencer
*/
-public class FTPDataSession implements Runnable
+public class FTPDataSession extends SrvSession implements Runnable
{
+ // Debug logging
+ private static final Log logger = LogFactory.getLog("org.alfresco.ftp.protocol");
+
+ // Data session command types
+
+ public enum DataCommand { StoreFile, ReturnFile };
+
// FTP session that this data connection is associated with
private FTPSrvSession m_cmdSess;
@@ -54,10 +74,6 @@ public class FTPDataSession implements Runnable
private ServerSocket m_passiveSock;
- // Adapter to bind the passive socket to
-
- private InetAddress m_bindAddr;
-
// Transfer in progress and abort file transfer flags
private boolean m_transfer;
@@ -66,7 +82,27 @@ public class FTPDataSession implements Runnable
// Send/receive data byte count
private long m_bytCount;
+
+ // Data command type
+
+ private DataCommand m_dataCmd;
+
+ // Requested file name
+
+ private String m_reqFileName;
+
+ // Path to the local file
+
+ private FTPPath m_ftpPath;
+ // Restart position
+
+ private long m_restartPos;
+
+ // Thread that runs the data command
+
+ private Thread m_dataThread;
+
/**
* Class constructor
*
@@ -77,7 +113,10 @@ public class FTPDataSession implements Runnable
*/
protected FTPDataSession(FTPSrvSession sess) throws IOException
{
-
+ // Setup the base class
+
+ super( -1, sess.getServer(), "FTPDATA", null);
+
// Set the associated command session
m_cmdSess = sess;
@@ -100,6 +139,9 @@ public class FTPDataSession implements Runnable
*/
protected FTPDataSession(FTPSrvSession sess, int localPort, InetAddress bindAddr) throws IOException
{
+ // Setup the base class
+
+ super( -1, sess.getServer(), "FTPDATA", null);
// Set the associated command session
@@ -124,6 +166,9 @@ public class FTPDataSession implements Runnable
*/
protected FTPDataSession(FTPSrvSession sess, InetAddress bindAddr) throws IOException
{
+ // Setup the base class
+
+ super( -1, sess.getServer(), "FTPDATA", null);
// Set the associated command session
@@ -146,6 +191,9 @@ public class FTPDataSession implements Runnable
*/
protected FTPDataSession(FTPSrvSession sess, InetAddress addr, int port)
{
+ // Setup the base class
+
+ super( -1, sess.getServer(), "FTPDATA", null);
// Set the associated command session
@@ -171,6 +219,9 @@ public class FTPDataSession implements Runnable
*/
protected FTPDataSession(FTPSrvSession sess, int localPort, InetAddress addr, int port)
{
+ // Setup the base class
+
+ super( -1, sess.getServer(), "FTPDATA", null);
// Set the associated command session
@@ -271,6 +322,16 @@ public class FTPDataSession implements Runnable
return m_transfer;
}
+ /**
+ * Determine if the transfer has been aborted
+ *
+ * @return boolean
+ */
+ public final boolean isTransferAborted()
+ {
+ return m_abort;
+ }
+
/**
* Abort an in progress file transfer
*/
@@ -358,12 +419,524 @@ public class FTPDataSession implements Runnable
}
m_passiveSock = null;
}
+
+ // Commit, or rollback, any active user transaction
+
+ try
+ {
+ // Commit or rollback the transaction
+
+ endTransaction();
+ }
+ catch ( Exception ex)
+ {
+ // Debug
+
+ if ( logger.isDebugEnabled())
+ logger.debug("Error committing transaction", ex);
+ }
}
+ /**
+ * Store a file using a separate thread to receive the data and write the file
+ *
+ * @param ftpPath FTPPath
+ * @param restartPos long
+ * @param reqFileName String
+ */
+ public final void doStoreFile( FTPPath ftpPath, long restartPos, String reqFileName)
+ {
+ // Set the transfer details
+
+ m_dataCmd = DataCommand.StoreFile;
+ m_ftpPath = ftpPath;
+ m_restartPos = restartPos;
+ m_reqFileName = reqFileName;
+
+ // Run the transfer in a separate thread
+
+ m_dataThread = new Thread(this);
+ m_dataThread.setName(m_cmdSess.getUniqueId() + "_DATA_STORE");
+ m_dataThread.start();
+ }
+
+ /**
+ * Return a file using a separate thread to read the file and send the data
+ *
+ * @param ftpPath FTPPath
+ * @param restartPos long
+ * @param reqFileName String
+ */
+ public final void doReturnFile( FTPPath ftpPath, long restartPos, String reqFileName)
+ {
+ // Set the transfer details
+
+ m_dataCmd = DataCommand.ReturnFile;
+ m_ftpPath = ftpPath;
+ m_restartPos = restartPos;
+ m_reqFileName = reqFileName;
+
+ // Run the transfer in a separate thread
+
+ m_dataThread = new Thread(this);
+ m_dataThread.setName(m_cmdSess.getUniqueId() + "_DATA_RETURN");
+ m_dataThread.start();
+ }
+
/**
* Run a file send/receive in a separate thread
*/
public void run()
{
+ // Setup the authentication context as we are running in a separate thread from the main FTP session
+
+ try
+ {
+ // Setup the authentication context for the thread
+
+ m_cmdSess.authenticateDataSession();
+
+ // Run the required data command
+
+ switch ( m_dataCmd)
+ {
+ // Store a file
+
+ case StoreFile:
+ runStoreFile();
+ break;
+
+ // Return a file
+
+ case ReturnFile:
+ runReturnFile();
+ break;
+ }
+ }
+ catch ( org.alfresco.repo.security.authentication.AuthenticationException ex)
+ {
+ if ( logger.isErrorEnabled())
+ logger.error("Failed to authenticate FTP data session", ex);
+
+ // Close the data connection to the client
+
+ m_cmdSess.getFTPServer().releaseDataSession(this);
+ closeSession();
+ }
}
+
+ /**
+ * Return a file to the client
+ */
+ private final void runReturnFile()
+ {
+ // Send the file to the client
+
+ OutputStream os = null;
+ DiskInterface disk = null;
+ TreeConnection tree = null;
+ NetworkFile netFile = null;
+ Socket dataSock = null;
+
+ try
+ {
+
+ // Open an output stream to the client
+
+ dataSock = getSocket();
+ os = dataSock.getOutputStream();
+
+ // Create a temporary tree connection
+
+ tree = m_cmdSess.getTreeConnection(m_ftpPath.getSharedDevice());
+
+ // Check if the file exists and it is a file, if so then open the
+ // file
+
+ disk = (DiskInterface) m_ftpPath.getSharedDevice().getInterface();
+
+ // Create the file open parameters
+
+ FileOpenParams params = new FileOpenParams(m_ftpPath.getSharePath(), FileAction.OpenIfExists,
+ AccessMode.ReadOnly, 0);
+
+ // Check if the file exists and it is a file
+
+ int sts = disk.fileExists( this, tree, m_ftpPath.getSharePath());
+
+ if (sts == FileStatus.FileExists)
+ {
+
+ // Open the file
+
+ netFile = disk.openFile( this, tree, params);
+ }
+
+ // Check if the file has been opened
+
+ if (netFile == null)
+ {
+ m_cmdSess.sendFTPResponse(550, "File " + m_reqFileName + " not available");
+ return;
+ }
+
+ // Allocate the buffer for the file data
+
+ byte[] buf = new byte[FTPSrvSession.DEFAULT_BUFFERSIZE];
+ long filePos = m_restartPos;
+
+ int len = -1;
+
+ while (filePos < netFile.getFileSize())
+ {
+
+ // Read another block of data from the file
+
+ len = disk.readFile( this, tree, netFile, buf, 0, buf.length, filePos);
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
+ logger.debug(" Write len=" + len + " bytes");
+
+ // Write the current data block to the client, update the file position
+
+ if (len > 0)
+ {
+
+ // Write the data to the client
+
+ os.write(buf, 0, len);
+
+ // Update the file position
+
+ filePos += len;
+
+ // Update the transfer byte count
+
+ m_bytCount += len;
+ }
+
+ // Check if the transfer has been aborted
+
+ if ( isTransferAborted())
+ {
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILE))
+ logger.debug(" Transfer aborted (RETR)");
+
+ // Send a status to the client
+
+ sendFTPResponse( 226, "Aborted data connection");
+
+ // Finally block will cleanup
+
+ return;
+ }
+ }
+
+ // Close the output stream to the client
+
+ os.close();
+ os = null;
+
+ // Indicate that the file has been transmitted
+
+ sendFTPResponse(226, "Closing data connection");
+
+ // Close the data session
+
+ m_cmdSess.getFTPServer().releaseDataSession(this);
+
+ // Close the network file
+
+ disk.closeFile( this, tree, netFile);
+ netFile = null;
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
+ logger.debug(" Transfer complete, file closed");
+ }
+ catch (SocketException ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Indicate that there was an error during transmission of the file
+ // data
+
+ sendFTPResponse(426, "Data connection closed by client");
+ }
+ catch (Exception ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Indicate that there was an error during transmission of the file
+ // data
+
+ sendFTPResponse(426, "Error during transmission");
+ }
+ finally
+ {
+ try
+ {
+ // Close the network file
+
+ if (netFile != null && disk != null && tree != null)
+ disk.closeFile(m_cmdSess, tree, netFile);
+
+ // Close the output stream to the client
+
+ if (os != null)
+ os.close();
+
+ // Close the data connection to the client
+
+ m_cmdSess.getFTPServer().releaseDataSession( this);
+ closeSession();
+ }
+ catch (Exception ex)
+ {
+ if ( logger.isErrorEnabled())
+ logger.error( "Error during FTP data session close", ex);
+ }
+ }
+ }
+
+ /**
+ * Store a file received from the client
+ */
+ private final void runStoreFile()
+ {
+ // Store the file from the client
+
+ InputStream is = null;
+ DiskInterface disk = null;
+ TreeConnection tree = null;
+ NetworkFile netFile = null;
+ Socket dataSock = null;
+
+ try
+ {
+
+ // Create a temporary tree connection
+
+ tree = m_cmdSess.getTreeConnection(m_ftpPath.getSharedDevice());
+
+ // Check if the session has the required access to the filesystem
+
+ if (tree == null || tree.hasWriteAccess() == false)
+ {
+
+ // Session does not have write access to the filesystem
+
+ sendFTPResponse(550, "Access denied");
+ return;
+ }
+
+ // Check if the file exists
+
+ disk = (DiskInterface) m_ftpPath.getSharedDevice().getInterface();
+ int sts = disk.fileExists(this, tree, m_ftpPath.getSharePath());
+
+ if (sts == FileStatus.DirectoryExists)
+ {
+
+ // Return an error status
+
+ sendFTPResponse(500, "Invalid path (existing directory)");
+ return;
+ }
+
+ // Create the file open parameters
+
+ FileOpenParams params = new FileOpenParams(m_ftpPath.getSharePath(),
+ sts == FileStatus.FileExists ? FileAction.TruncateExisting : FileAction.CreateNotExist,
+ AccessMode.ReadWrite, 0);
+
+ // Create a new file to receive the data
+
+ if (sts == FileStatus.FileExists)
+ {
+
+ // Overwrite the existing file
+
+ netFile = disk.openFile(this, tree, params);
+ }
+ else
+ {
+
+ // Create a new file
+
+ netFile = disk.createFile(this, tree, params);
+ }
+
+ // Notify change listeners that a new file has been created
+
+ DiskDeviceContext diskCtx = (DiskDeviceContext) tree.getContext();
+
+ if (diskCtx.hasChangeHandler())
+ diskCtx.getChangeHandler().notifyFileChanged(NotifyChange.ActionAdded, m_ftpPath.getSharePath());
+
+ // Send the intermediate response
+
+ sendFTPResponse(150, "File status okay, about to open data connection");
+
+ // Get the data connection socket
+
+ try
+ {
+ dataSock = getSocket();
+ }
+ catch (Exception ex)
+ {
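+ // Ignore the error; dataSock is checked for null below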
+ }
+
+ if (dataSock == null)
+ {
+ sendFTPResponse(426, "Connection closed; transfer aborted");
+ return;
+ }
+
+ // Open an input stream from the client
+
+ is = dataSock.getInputStream();
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILE))
+ logger.debug("Storing ftp="
+ + m_ftpPath.getFTPPath() + ", share=" + m_ftpPath.getShareName() + ", path="
+ + m_ftpPath.getSharePath());
+
+ // Allocate the buffer for the file data
+
+ byte[] buf = new byte[FTPSrvSession.DEFAULT_BUFFERSIZE];
+ long filePos = 0;
+ int len = is.read(buf, 0, buf.length);
+
+ while (len > 0)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
+ logger.debug(" Receive len=" + len + " bytes");
+
+ // Write the current data block to the file, update the file
+ // position
+
+ disk.writeFile(this, tree, netFile, buf, 0, len, filePos);
+ filePos += len;
+
+ // Read another block of data from the client
+
+ len = is.read(buf, 0, buf.length);
+ }
+
+ // Close the input stream from the client
+
+ is.close();
+ is = null;
+
+ // Close the network file
+
+ disk.closeFile(this, tree, netFile);
+ netFile = null;
+
+ // Commit the transaction now before notifying the client that the transfer is finished
+
+ endTransaction();
+
+ // Indicate that the file has been received
+
+ sendFTPResponse(226, "Closing data connection");
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_FILEIO))
+ logger.debug(" Transfer complete, file closed");
+ }
+ catch (SocketException ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Indicate that there was an error during transmission of the file data
+
+ sendFTPResponse(426, "Data connection closed by client");
+ }
+ catch (Exception ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && m_cmdSess.hasDebug(FTPSrvSession.DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Indicate that there was an error during transmission of the file
+ // data
+
+ sendFTPResponse(426, "Error during transmission");
+ }
+ finally
+ {
+ try
+ {
+ // Close the network file
+
+ if (netFile != null && disk != null && tree != null)
+ disk.closeFile( this, tree, netFile);
+
+ // Close the input stream to the client
+
+ if (is != null)
+ is.close();
+
+ // Close the data connection to the client
+
+ m_cmdSess.getFTPServer().releaseDataSession(this);
+ closeSession();
+ }
+ catch (Exception ex)
+ {
+ if ( logger.isErrorEnabled())
+ logger.error( "Error during FTP data session close", ex);
+ }
+ }
+ }
+
+ /**
+ * Send an FTP response to the client via the command session
+ *
+ * @param stsCode int
+ * @param msg String
+ */
+ protected final void sendFTPResponse(int stsCode, String msg)
+ {
+ try
+ {
+ m_cmdSess.sendFTPResponse( stsCode, msg);
+ }
+ catch (Exception ex)
+ {
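+ // Ignore errors sending the response via the command session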
+ }
+ }
+
+ /**
+ * Return the client address
+ *
+ * @return InetAddress
+ */
+ public InetAddress getRemoteAddress()
+ {
+ return m_cmdSess.getRemoteAddress();
+ }
}
diff --git a/source/java/org/alfresco/filesys/ftp/FTPDate.java b/source/java/org/alfresco/filesys/ftp/FTPDate.java
index 6e68d691eb..7436de2880 100644
--- a/source/java/org/alfresco/filesys/ftp/FTPDate.java
+++ b/source/java/org/alfresco/filesys/ftp/FTPDate.java
@@ -98,10 +98,10 @@ public class FTPDate
buf.append(hr);
buf.append(":");
- int sec = cal.get(Calendar.SECOND);
- if (sec < 10)
+ int min = cal.get(Calendar.MINUTE);
+ if (min < 10)
buf.append("0");
- buf.append(sec);
+ buf.append(min);
}
}
}
diff --git a/source/java/org/alfresco/filesys/ftp/FTPSrvSession.java b/source/java/org/alfresco/filesys/ftp/FTPSrvSession.java
index ee7993f36e..0b22b83443 100644
--- a/source/java/org/alfresco/filesys/ftp/FTPSrvSession.java
+++ b/source/java/org/alfresco/filesys/ftp/FTPSrvSession.java
@@ -67,8 +67,6 @@ import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import com.sun.star.uno.RuntimeException;
-
/**
* FTP Server Session Class
*
@@ -127,7 +125,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
// File transfer buffer size
- private static final int DEFAULT_BUFFERSIZE = 64000;
+ public static final int DEFAULT_BUFFERSIZE = 64000;
// Carriage return/line feed combination required for response messages
@@ -137,6 +135,10 @@ public class FTPSrvSession extends SrvSession implements Runnable
protected final static String LIST_OPTION_HIDDEN = "-a";
+ // Flag to control whether data transfers use a separate thread
+
+ private static boolean UseThreadedDataTransfer = true;
+
// Session socket
private Socket m_sock;
@@ -235,14 +237,18 @@ public class FTPSrvSession extends SrvSession implements Runnable
if (m_dataSess != null)
{
+ // Abort any active transfer
+
+ m_dataSess.abortTransfer();
+
+ // Remove the data session
+
getFTPServer().releaseDataSession(m_dataSess);
m_dataSess = null;
}
// Close the socket first, if the client is still connected this should
- // allow the
- // input/output streams
- // to be closed
+ // allow the input/output streams to be closed
if (m_sock != null)
{
@@ -335,8 +341,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Set the default path for the session
*
- * @param rootPath
- * FTPPath
+ * @param rootPath FTPPath
*/
public final void setRootPath(FTPPath rootPath)
{
@@ -353,10 +358,8 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Get the path details for the current request
*
- * @param req
- * FTPRequest
- * @param filePath
- * boolean
+ * @param req FTPRequest
+ * @param filePath boolean
* @return FTPPath
*/
protected final FTPPath generatePathForRequest(FTPRequest req, boolean filePath)
@@ -367,12 +370,9 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Get the path details for the current request
*
- * @param req
- * FTPRequest
- * @param filePath
- * boolean
- * @param checkExists
- * boolean
+ * @param req FTPRequest
+ * @param filePath boolean
+ * @param checkExists boolean
* @return FTPPath
*/
protected final FTPPath generatePathForRequest(FTPRequest req, boolean filePath, boolean checkExists)
@@ -583,8 +583,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Convert a path string from share path separators to FTP path separators
*
- * @param path
- * String
+ * @param path String
* @return String
*/
protected final String convertToFTPSeperators(String path)
@@ -603,8 +602,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Find the required disk shared device
*
- * @param name
- * String
+ * @param name String
* @return DiskSharedDevice
*/
protected final DiskSharedDevice findShare(String name)
@@ -630,8 +628,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Set the binary mode flag
*
- * @param bin
- * boolean
+ * @param bin boolean
*/
protected final void setBinary(boolean bin)
{
@@ -641,10 +638,8 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Send an FTP command response
*
- * @param stsCode
- * int
- * @param msg
- * String
+ * @param stsCode int
+ * @param msg String
* @exception IOException
*/
protected final void sendFTPResponse(int stsCode, String msg) throws IOException
@@ -680,8 +675,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Send an FTP command response
*
- * @param msg
- * StringBuffer
+ * @param msg StringBuffer
* @exception IOException
*/
protected final void sendFTPResponse(StringBuffer msg) throws IOException
@@ -697,11 +691,29 @@ public class FTPSrvSession extends SrvSession implements Runnable
}
}
+ /**
+ * Send an FTP command response
+ *
+ * @param msg String
+ * @exception IOException
+ */
+ protected final void sendFTPResponse(String msg) throws IOException
+ {
+
+ // Output the FTP response
+
+ if (m_out != null)
+ {
+ m_out.write(msg);
+ m_out.write(CRLF);
+ m_out.flush();
+ }
+ }
+
/**
* Process a user command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procUser(FTPRequest req) throws IOException
@@ -750,8 +762,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a password command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procPassword(FTPRequest req) throws IOException
@@ -827,6 +838,10 @@ public class FTPSrvSession extends SrvSession implements Runnable
sendFTPResponse(230, "User logged in, proceed");
setLoggedOn(true);
+ // Save the client info
+
+ setClientInformation( cInfo);
+
// DEBUG
if (logger.isDebugEnabled() && hasDebug(DBG_STATE))
@@ -909,8 +924,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a port command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procPort(FTPRequest req) throws IOException
@@ -990,8 +1004,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a passive command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procPassive(FTPRequest req) throws IOException
@@ -1049,8 +1062,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a print working directory command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procPrintWorkDir(FTPRequest req) throws IOException
@@ -1078,8 +1090,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a change working directory command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procChangeWorkDir(FTPRequest req) throws IOException
@@ -1128,8 +1139,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a change directory up command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procCdup(FTPRequest req) throws IOException
@@ -1177,8 +1187,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a long directory listing command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procList(FTPRequest req) throws IOException
@@ -1380,8 +1389,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a short directory listing command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procNList(FTPRequest req) throws IOException
@@ -1532,8 +1540,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a system status command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procSystemStatus(FTPRequest req) throws IOException
@@ -1547,8 +1554,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a server status command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procServerStatus(FTPRequest req) throws IOException
@@ -1562,8 +1568,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a help command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procHelp(FTPRequest req) throws IOException
@@ -1577,8 +1582,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a no-op command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procNoop(FTPRequest req) throws IOException
@@ -1592,8 +1596,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a quit command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procQuit(FTPRequest req) throws IOException
@@ -1616,8 +1619,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a type command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procType(FTPRequest req) throws IOException
@@ -1660,8 +1662,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a restart command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procRestart(FTPRequest req) throws IOException
@@ -1708,8 +1709,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a return file command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procReturnFile(FTPRequest req) throws IOException
@@ -1760,198 +1760,215 @@ public class FTPSrvSession extends SrvSession implements Runnable
return;
}
- // Get the data connection socket
-
- Socket dataSock = null;
-
- try
+ // Check if a separate thread should be used for the data transfer
+
+ if ( UseThreadedDataTransfer == true)
{
- dataSock = m_dataSess.getSocket();
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_FILE))
+ logger.debug("Returning (threaded) ftp="
+ + ftpPath.getFTPPath() + ", share=" + ftpPath.getShareName() + ", path=" + ftpPath.getSharePath());
+
+ // Start the transfer in a separate thread
+
+ m_dataSess.doReturnFile( ftpPath, m_restartPos, req.getArgument());
}
- catch (Exception ex)
+ else
{
- }
-
- if (dataSock == null)
- {
- sendFTPResponse(426, "Connection closed; transfer aborted");
- return;
- }
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_FILE))
- logger.debug("Returning ftp="
- + ftpPath.getFTPPath() + ", share=" + ftpPath.getShareName() + ", path=" + ftpPath.getSharePath());
-
- // Send the file to the client
-
- OutputStream os = null;
- DiskInterface disk = null;
- TreeConnection tree = null;
- NetworkFile netFile = null;
-
- try
- {
-
- // Open an output stream to the client
-
- os = dataSock.getOutputStream();
-
- // Create a temporary tree connection
-
- tree = getTreeConnection(ftpPath.getSharedDevice());
-
- // Check if the file exists and it is a file, if so then open the
- // file
-
- disk = (DiskInterface) ftpPath.getSharedDevice().getInterface();
-
- // Create the file open parameters
-
- FileOpenParams params = new FileOpenParams(ftpPath.getSharePath(), FileAction.OpenIfExists,
- AccessMode.ReadOnly, 0);
-
- // Check if the file exists and it is a file
-
- int sts = disk.fileExists(this, tree, ftpPath.getSharePath());
-
- if (sts == FileStatus.FileExists)
- {
-
- // Open the file
-
- netFile = disk.openFile(this, tree, params);
- }
-
- // Check if the file has been opened
-
- if (netFile == null)
- {
- sendFTPResponse(550, "File " + req.getArgument() + " not available");
- return;
- }
-
- // Allocate the buffer for the file data
-
- byte[] buf = new byte[DEFAULT_BUFFERSIZE];
- long filePos = m_restartPos;
-
- int len = -1;
-
- while (filePos < netFile.getFileSize())
- {
-
- // Read another block of data from the file
-
- len = disk.readFile(this, tree, netFile, buf, 0, buf.length, filePos);
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
- logger.debug(" Write len=" + len + " bytes");
-
- // Write the current data block to the client, update the file
- // position
-
- if (len > 0)
- {
-
- // Write the data to the client
-
- os.write(buf, 0, len);
-
- // Update the file position
-
- filePos += len;
- }
- }
-
- // Close the output stream to the client
-
- os.close();
- os = null;
-
- // Indicate that the file has been transmitted
-
- sendFTPResponse(226, "Closing data connection");
-
- // Close the data session
-
- getFTPServer().releaseDataSession(m_dataSess);
- m_dataSess = null;
-
- // Close the network file
-
- disk.closeFile(this, tree, netFile);
- netFile = null;
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
- logger.debug(" Transfer complete, file closed");
- }
- catch (SocketException ex)
- {
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
- logger.debug(" Error during transfer", ex);
-
- // Close the data socket to the client
-
- if (m_dataSess != null)
- {
- m_dataSess.closeSession();
- m_dataSess = null;
- }
-
- // Indicate that there was an error during transmission of the file
- // data
-
- sendFTPResponse(426, "Data connection closed by client");
- }
- catch (Exception ex)
- {
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
- logger.debug(" Error during transfer", ex);
-
- // Indicate that there was an error during transmission of the file
- // data
-
- sendFTPResponse(426, "Error during transmission");
- } finally
- {
-
- // Close the network file
-
- if (netFile != null && disk != null && tree != null)
- disk.closeFile(this, tree, netFile);
-
- // Close the output stream to the client
-
- if (os != null)
- os.close();
-
- // Close the data connection to the client
-
- if (m_dataSess != null)
- {
- getFTPServer().releaseDataSession(m_dataSess);
- m_dataSess = null;
- }
+ // Get the data connection socket
+
+ Socket dataSock = null;
+
+ try
+ {
+ dataSock = m_dataSess.getSocket();
+ }
+ catch (Exception ex)
+ {
+ }
+
+ if (dataSock == null)
+ {
+ sendFTPResponse(426, "Connection closed; transfer aborted");
+ return;
+ }
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_FILE))
+ logger.debug("Returning ftp="
+ + ftpPath.getFTPPath() + ", share=" + ftpPath.getShareName() + ", path=" + ftpPath.getSharePath());
+
+ // Send the file to the client
+
+ OutputStream os = null;
+ DiskInterface disk = null;
+ TreeConnection tree = null;
+ NetworkFile netFile = null;
+
+ try
+ {
+
+ // Open an output stream to the client
+
+ os = dataSock.getOutputStream();
+
+ // Create a temporary tree connection
+
+ tree = getTreeConnection(ftpPath.getSharedDevice());
+
+ // Check if the file exists and it is a file, if so then open the
+ // file
+
+ disk = (DiskInterface) ftpPath.getSharedDevice().getInterface();
+
+ // Create the file open parameters
+
+ FileOpenParams params = new FileOpenParams(ftpPath.getSharePath(), FileAction.OpenIfExists,
+ AccessMode.ReadOnly, 0);
+
+ // Check if the file exists and it is a file
+
+ int sts = disk.fileExists(this, tree, ftpPath.getSharePath());
+
+ if (sts == FileStatus.FileExists)
+ {
+
+ // Open the file
+
+ netFile = disk.openFile(this, tree, params);
+ }
+
+ // Check if the file has been opened
+
+ if (netFile == null)
+ {
+ sendFTPResponse(550, "File " + req.getArgument() + " not available");
+ return;
+ }
+
+ // Allocate the buffer for the file data
+
+ byte[] buf = new byte[DEFAULT_BUFFERSIZE];
+ long filePos = m_restartPos;
+
+ int len = -1;
+
+ while (filePos < netFile.getFileSize())
+ {
+
+ // Read another block of data from the file
+
+ len = disk.readFile(this, tree, netFile, buf, 0, buf.length, filePos);
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
+ logger.debug(" Write len=" + len + " bytes");
+
+ // Write the current data block to the client, update the file
+ // position
+
+ if (len > 0)
+ {
+
+ // Write the data to the client
+
+ os.write(buf, 0, len);
+
+ // Update the file position
+
+ filePos += len;
+ }
+ }
+
+ // Close the output stream to the client
+
+ os.close();
+ os = null;
+
+ // Indicate that the file has been transmitted
+
+ sendFTPResponse(226, "Closing data connection");
+
+ // Close the data session
+
+ getFTPServer().releaseDataSession(m_dataSess);
+ m_dataSess = null;
+
+ // Close the network file
+
+ disk.closeFile(this, tree, netFile);
+ netFile = null;
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
+ logger.debug(" Transfer complete, file closed");
+ }
+ catch (SocketException ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Close the data socket to the client
+
+ if (m_dataSess != null)
+ {
+ m_dataSess.closeSession();
+ m_dataSess = null;
+ }
+
+ // Indicate that there was an error during transmission of the file
+ // data
+
+ sendFTPResponse(426, "Data connection closed by client");
+ }
+ catch (Exception ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Indicate that there was an error during transmission of the file
+ // data
+
+ sendFTPResponse(426, "Error during transmission");
+ }
+ finally
+ {
+
+ // Close the network file
+
+ if (netFile != null && disk != null && tree != null)
+ disk.closeFile(this, tree, netFile);
+
+ // Close the output stream to the client
+
+ if (os != null)
+ os.close();
+
+ // Close the data connection to the client
+
+ if (m_dataSess != null)
+ {
+ getFTPServer().releaseDataSession(m_dataSess);
+ m_dataSess = null;
+ }
+ }
}
}
/**
* Process a store file command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procStoreFile(FTPRequest req) throws IOException
@@ -1982,252 +1999,269 @@ public class FTPSrvSession extends SrvSession implements Runnable
return;
}
- // Send the file to the client
-
- InputStream is = null;
- DiskInterface disk = null;
- TreeConnection tree = null;
- NetworkFile netFile = null;
-
- try
+ // Check if a separate thread should be used for the data transfer
+
+ if ( UseThreadedDataTransfer == true)
{
-
- // Create a temporary tree connection
-
- tree = getTreeConnection(ftpPath.getSharedDevice());
-
- // Check if the session has the required access to the filesystem
-
- if (tree == null || tree.hasWriteAccess() == false)
- {
-
- // Session does not have write access to the filesystem
-
- sendFTPResponse(550, "Access denied");
- return;
- }
-
- // Check if the file exists
-
- disk = (DiskInterface) ftpPath.getSharedDevice().getInterface();
- int sts = disk.fileExists(this, tree, ftpPath.getSharePath());
-
- if (sts == FileStatus.DirectoryExists)
- {
-
- // Return an error status
-
- sendFTPResponse(500, "Invalid path (existing directory)");
- return;
- }
-
- // Create the file open parameters
-
- FileOpenParams params = new FileOpenParams(ftpPath.getSharePath(),
- sts == FileStatus.FileExists ? FileAction.TruncateExisting : FileAction.CreateNotExist,
- AccessMode.ReadWrite, 0);
-
- // Create a new file to receive the data
-
- if (sts == FileStatus.FileExists)
- {
-
- // Overwrite the existing file
-
- netFile = disk.openFile(this, tree, params);
- }
- else
- {
-
- // Create a new file
-
- netFile = disk.createFile(this, tree, params);
- }
-
- // Notify change listeners that a new file has been created
-
- DiskDeviceContext diskCtx = (DiskDeviceContext) tree.getContext();
-
- if (diskCtx.hasChangeHandler())
- diskCtx.getChangeHandler().notifyFileChanged(NotifyChange.ActionAdded, ftpPath.getSharePath());
-
- // Send the intermediate response
-
- sendFTPResponse(150, "File status okay, about to open data connection");
-
- // Check if there is an active data session
-
- if (m_dataSess == null)
- {
- sendFTPResponse(425, "Can't open data connection");
- return;
- }
-
- // Get the data connection socket
-
- Socket dataSock = null;
-
- try
- {
- dataSock = m_dataSess.getSocket();
- }
- catch (Exception ex)
- {
- }
-
- if (dataSock == null)
- {
- sendFTPResponse(426, "Connection closed; transfer aborted");
- return;
- }
-
- // Open an input stream from the client
-
- is = dataSock.getInputStream();
-
// DEBUG
-
+
if (logger.isDebugEnabled() && hasDebug(DBG_FILE))
- logger.debug("Storing ftp="
+ logger.debug("Storing (threaded) ftp="
+ ftpPath.getFTPPath() + ", share=" + ftpPath.getShareName() + ", path="
+ ftpPath.getSharePath());
- // Allocate the buffer for the file data
-
- byte[] buf = new byte[DEFAULT_BUFFERSIZE];
- long filePos = 0;
- int len = is.read(buf, 0, buf.length);
-
- while (len > 0)
- {
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
- logger.debug(" Receive len=" + len + " bytes");
-
- // Write the current data block to the file, update the file
- // position
-
- disk.writeFile(this, tree, netFile, buf, 0, len, filePos);
- filePos += len;
-
- // Read another block of data from the client
-
- len = is.read(buf, 0, buf.length);
- }
-
- // Close the input stream from the client
-
- is.close();
- is = null;
-
- // Close the network file
-
- disk.closeFile(this, tree, netFile);
- netFile = null;
-
- // Indicate that the file has been received
-
- sendFTPResponse(226, "Closing data connection");
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
- logger.debug(" Transfer complete, file closed");
+ // Start the transfer in a separate thread
+
+ m_dataSess.doStoreFile( ftpPath, m_restartPos, req.getArgument());
}
- catch( AccessDeniedException ex)
+ else
{
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
- logger.debug(" Access denied", ex);
-
- // Session does not have write access to the filesystem
-
- sendFTPResponse(550, "Access denied");
- }
- catch (SocketException ex)
- {
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
- logger.debug(" Error during transfer", ex);
-
- // Close the data socket to the client
-
- if (m_dataSess != null)
- {
- getFTPServer().releaseDataSession(m_dataSess);
- m_dataSess = null;
- }
-
- // Indicate that there was an error during transmission of the file
- // data
-
- sendFTPResponse(426, "Data connection closed by client");
- }
- catch (DiskFullException ex)
- {
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
- logger.debug(" Error during transfer", ex);
-
- // Close the data socket to the client
-
- if (m_dataSess != null)
- {
- getFTPServer().releaseDataSession(m_dataSess);
- m_dataSess = null;
- }
-
- // Indicate that there was an error during writing of the file
-
- sendFTPResponse(451, "Disk full");
- }
- catch (Exception ex)
- {
-
- // DEBUG
-
- if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
- logger.debug(" Error during transfer", ex);
-
- // Indicate that there was an error during transmission of the file
- // data
-
- sendFTPResponse(426, "Error during transmission");
- }
- finally
- {
-
- // Close the network file
-
- if (netFile != null && disk != null && tree != null)
- disk.closeFile(this, tree, netFile);
-
- // Close the input stream to the client
-
- if (is != null)
- is.close();
-
- // Close the data connection to the client
-
- if (m_dataSess != null)
- {
- getFTPServer().releaseDataSession(m_dataSess);
- m_dataSess = null;
- }
+ // Send the file to the client
+
+ InputStream is = null;
+ DiskInterface disk = null;
+ TreeConnection tree = null;
+ NetworkFile netFile = null;
+
+ try
+ {
+
+ // Create a temporary tree connection
+
+ tree = getTreeConnection(ftpPath.getSharedDevice());
+
+ // Check if the session has the required access to the filesystem
+
+ if (tree == null || tree.hasWriteAccess() == false)
+ {
+
+ // Session does not have write access to the filesystem
+
+ sendFTPResponse(550, "Access denied");
+ return;
+ }
+
+ // Check if the file exists
+
+ disk = (DiskInterface) ftpPath.getSharedDevice().getInterface();
+ int sts = disk.fileExists(this, tree, ftpPath.getSharePath());
+
+ if (sts == FileStatus.DirectoryExists)
+ {
+
+ // Return an error status
+
+ sendFTPResponse(500, "Invalid path (existing directory)");
+ return;
+ }
+
+ // Create the file open parameters
+
+ FileOpenParams params = new FileOpenParams(ftpPath.getSharePath(),
+ sts == FileStatus.FileExists ? FileAction.TruncateExisting : FileAction.CreateNotExist,
+ AccessMode.ReadWrite, 0);
+
+ // Create a new file to receive the data
+
+ if (sts == FileStatus.FileExists)
+ {
+
+ // Overwrite the existing file
+
+ netFile = disk.openFile(this, tree, params);
+ }
+ else
+ {
+
+ // Create a new file
+
+ netFile = disk.createFile(this, tree, params);
+ }
+
+ // Notify change listeners that a new file has been created
+
+ DiskDeviceContext diskCtx = (DiskDeviceContext) tree.getContext();
+
+ if (diskCtx.hasChangeHandler())
+ diskCtx.getChangeHandler().notifyFileChanged(NotifyChange.ActionAdded, ftpPath.getSharePath());
+
+ // Send the intermediate response
+
+ sendFTPResponse(150, "File status okay, about to open data connection");
+
+ // Check if there is an active data session
+
+ if (m_dataSess == null)
+ {
+ sendFTPResponse(425, "Can't open data connection");
+ return;
+ }
+
+ // Get the data connection socket
+
+ Socket dataSock = null;
+
+ try
+ {
+ dataSock = m_dataSess.getSocket();
+ }
+ catch (Exception ex)
+ {
+ }
+
+ if (dataSock == null)
+ {
+ sendFTPResponse(426, "Connection closed; transfer aborted");
+ return;
+ }
+
+ // Open an input stream from the client
+
+ is = dataSock.getInputStream();
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_FILE))
+ logger.debug("Storing ftp="
+ + ftpPath.getFTPPath() + ", share=" + ftpPath.getShareName() + ", path="
+ + ftpPath.getSharePath());
+
+ // Allocate the buffer for the file data
+
+ byte[] buf = new byte[DEFAULT_BUFFERSIZE];
+ long filePos = 0;
+ int len = is.read(buf, 0, buf.length);
+
+ while (len > 0)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
+ logger.debug(" Receive len=" + len + " bytes");
+
+ // Write the current data block to the file, update the file
+ // position
+
+ disk.writeFile(this, tree, netFile, buf, 0, len, filePos);
+ filePos += len;
+
+ // Read another block of data from the client
+
+ len = is.read(buf, 0, buf.length);
+ }
+
+ // Close the input stream from the client
+
+ is.close();
+ is = null;
+
+ // Close the network file
+
+ disk.closeFile(this, tree, netFile);
+ netFile = null;
+
+ // Indicate that the file has been received
+
+ sendFTPResponse(226, "Closing data connection");
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_FILEIO))
+ logger.debug(" Transfer complete, file closed");
+ }
+ catch( AccessDeniedException ex)
+ {
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
+ logger.debug(" Access denied", ex);
+
+ // Session does not have write access to the filesystem
+
+ sendFTPResponse(550, "Access denied");
+ }
+ catch (SocketException ex)
+ {
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Close the data socket to the client
+
+ if (m_dataSess != null)
+ {
+ getFTPServer().releaseDataSession(m_dataSess);
+ m_dataSess = null;
+ }
+
+ // Indicate that there was an error during transmission of the file
+ // data
+
+ sendFTPResponse(426, "Data connection closed by client");
+ }
+ catch (DiskFullException ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Close the data socket to the client
+
+ if (m_dataSess != null)
+ {
+ getFTPServer().releaseDataSession(m_dataSess);
+ m_dataSess = null;
+ }
+
+ // Indicate that there was an error during writing of the file
+
+ sendFTPResponse(451, "Disk full");
+ }
+ catch (Exception ex)
+ {
+
+ // DEBUG
+
+ if (logger.isDebugEnabled() && hasDebug(DBG_ERROR))
+ logger.debug(" Error during transfer", ex);
+
+ // Indicate that there was an error during transmission of the file
+ // data
+
+ sendFTPResponse(426, "Error during transmission");
+ }
+ finally
+ {
+
+ // Close the network file
+
+ if (netFile != null && disk != null && tree != null)
+ disk.closeFile(this, tree, netFile);
+
+ // Close the input stream to the client
+
+ if (is != null)
+ is.close();
+
+ // Close the data connection to the client
+
+ if (m_dataSess != null)
+ {
+ getFTPServer().releaseDataSession(m_dataSess);
+ m_dataSess = null;
+ }
+ }
}
}
/**
* Process a delete file command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procDeleteFile(FTPRequest req) throws IOException
@@ -2342,8 +2376,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a rename from command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procRenameFrom(FTPRequest req) throws IOException
@@ -2445,8 +2478,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a rename to command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procRenameTo(FTPRequest req) throws IOException
@@ -2573,8 +2605,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a create directory command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procCreateDirectory(FTPRequest req) throws IOException
@@ -2609,7 +2640,6 @@ public class FTPSrvSession extends SrvSession implements Runnable
DiskInterface disk = null;
TreeConnection tree = null;
- NetworkFile netFile = null;
try
{
@@ -2682,8 +2712,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a delete directory command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procRemoveDirectory(FTPRequest req) throws IOException
@@ -2729,7 +2758,6 @@ public class FTPSrvSession extends SrvSession implements Runnable
DiskInterface disk = null;
TreeConnection tree = null;
- NetworkFile netFile = null;
try
{
@@ -2800,8 +2828,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a modify date/time command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procModifyDateTime(FTPRequest req) throws IOException
@@ -2812,11 +2839,33 @@ public class FTPSrvSession extends SrvSession implements Runnable
sendFTPResponse(550, "Not implemented yet");
}
+ /**
+ * Process a features command
+ *
+ * @param req FTPRequest
+ * @exception IOException
+ */
+ protected final void procFeatures(FTPRequest req) throws IOException
+ {
+ // Check if the user is logged in
+
+ if (isLoggedOn() == false)
+ {
+ sendFTPResponse(500, "");
+ return;
+ }
+
+ // Send back the list of features supported by this FTP server
+
+ sendFTPResponse( 211, "Features");
+ sendFTPResponse( "SIZE");
+ sendFTPResponse( 211, "End");
+ }
+
/**
* Process a file size command
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procFileSize(FTPRequest req) throws IOException
@@ -2843,7 +2892,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
FTPPath ftpPath = generatePathForRequest(req, true);
if (ftpPath == null)
{
- sendFTPResponse(500, "Invalid path");
+ sendFTPResponse(550, "Invalid path");
return;
}
@@ -2892,8 +2941,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a structure command. This command is obsolete.
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procStructure(FTPRequest req) throws IOException
@@ -2912,8 +2960,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
/**
* Process a mode command. This command is obsolete.
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procMode(FTPRequest req) throws IOException
@@ -2929,11 +2976,45 @@ public class FTPSrvSession extends SrvSession implements Runnable
sendFTPResponse(504, "Obsolete");
}
+ /**
+ * Abort an active file transfer
+ *
+ * @param req FTPRequest
+ * @exception IOException
+ */
+ protected final void procAbort(FTPRequest req) throws IOException
+ {
+ // Check if threaded transfers are enabled
+
+ if ( UseThreadedDataTransfer == true)
+ {
+ // Check if there is an active data connection
+
+ if ( m_dataSess != null)
+ {
+ // Abort the data transfer
+
+ m_dataSess.abortTransfer();
+ }
+ else
+ {
+ // Inform the client that no transfer is in progress
+
+ sendFTPResponse( 226, "Data connection not active");
+ }
+ }
+ else
+ {
+ // Abort not implemented for inline transfers
+
+ sendFTPResponse( 502, "Abort not implemented");
+ }
+ }
+
/**
* Process an allocate command. This command is obsolete.
*
- * @param req
- * FTPRequest
+ * @param req FTPRequest
* @exception IOException
*/
protected final void procAllocate(FTPRequest req) throws IOException
@@ -2948,12 +3029,9 @@ public class FTPSrvSession extends SrvSession implements Runnable
* Build a list of file name or file information objects for the specified
* server path
*
- * @param path
- * FTPPath
- * @param nameOnly
- * boolean
- * @param hidden
- * boolean
+ * @param path FTPPath
+ * @param nameOnly boolean
+ * @param hidden boolean
* @return Vector
*/
protected final Vector listFilesForPath(FTPPath path, boolean nameOnly, boolean hidden)
@@ -3144,8 +3222,7 @@ public class FTPSrvSession extends SrvSession implements Runnable
* Get a tree connection for the specified shared device. Creates and caches
* a new tree connection if required.
*
- * @param share
- * SharedDevice
+ * @param share SharedDevice
* @return TreeConnection
*/
protected final TreeConnection getTreeConnection(SharedDevice share)
@@ -3338,7 +3415,6 @@ public class FTPSrvSession extends SrvSession implements Runnable
switch (ftpReq.isCommand())
{
-
// User command
case FTPCommand.User:
@@ -3516,6 +3592,18 @@ public class FTPSrvSession extends SrvSession implements Runnable
procAllocate(ftpReq);
break;
+ // Abort an active file data transfer
+
+ case FTPCommand.Abor:
+ procAbort(ftpReq);
+ break;
+
+ // Return the list of features that this server supports
+
+ case FTPCommand.Feat:
+ procFeatures(ftpReq);
+ break;
+
// Unknown/unimplemented command
default:
@@ -3600,4 +3688,33 @@ public class FTPSrvSession extends SrvSession implements Runnable
if (hasDebug(DBG_STATE))
logger.debug("Server session closed");
}
+
+ /**
+ * Authenticate an associated FTP data session using the same credentials as the main FTP session
+ *
+ * @exception AuthenticationException
+ */
+ protected void authenticateDataSession() throws org.alfresco.repo.security.authentication.AuthenticationException
+ {
+ // Use the normal authentication service as we have the plaintext password
+
+ AuthenticationService authService = getServer().getConfiguration().getAuthenticationService();
+
+ // Authenticate the user
+
+ ClientInfo cInfo = getClientInformation();
+
+ if ( cInfo.isGuest())
+ {
+ // Authenticate as the guest user
+
+ authService.authenticateAsGuest();
+ }
+ else
+ {
+ // Authenticate as a normal user
+
+ authService.authenticate( cInfo.getUserName(), cInfo.getPasswordAsCharArray());
+ }
+ }
}
\ No newline at end of file
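A minimal control-channel probe for the new FEAT and SIZE handling, assuming a server on localhost:21; the admin/admin credentials and the sample path are placeholders, not values taken from this patch, and the reply texts in the comments are those produced by the handlers above.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.Socket;

// Minimal FTP control-channel probe for the FEAT and SIZE commands added above.
// Host, port, credentials and the file path are illustrative placeholders.
public class FtpFeatProbe
{
    private static void send(PrintWriter out, String cmd)
    {
        out.print(cmd + "\r\n");   // FTP commands are CRLF terminated
        out.flush();
    }

    public static void main(String[] args) throws Exception
    {
        try (Socket ctrl = new Socket("localhost", 21);
             BufferedReader in = new BufferedReader(new InputStreamReader(ctrl.getInputStream()));
             PrintWriter out = new PrintWriter(new OutputStreamWriter(ctrl.getOutputStream())))
        {
            System.out.println(in.readLine());                 // 220 banner

            send(out, "USER admin");                           // placeholder account
            System.out.println(in.readLine());
            send(out, "PASS admin");
            System.out.println(in.readLine());

            // procFeatures() replies "211 Features", the feature list, then "211 End"
            send(out, "FEAT");
            String line;
            do
            {
                line = in.readLine();
                System.out.println(line);
            }
            while (line != null && !line.startsWith("211 End"));

            // procFileSize() now answers 550 (not 500) for a path that cannot be resolved
            send(out, "SIZE /Company Home/missing.txt");
            System.out.println(in.readLine());

            send(out, "QUIT");
            System.out.println(in.readLine());
        }
    }
}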
diff --git a/source/java/org/alfresco/filesys/server/config/ServerConfiguration.java b/source/java/org/alfresco/filesys/server/config/ServerConfiguration.java
index b496d631dc..d11781753a 100644
--- a/source/java/org/alfresco/filesys/server/config/ServerConfiguration.java
+++ b/source/java/org/alfresco/filesys/server/config/ServerConfiguration.java
@@ -460,8 +460,6 @@ public class ServerConfiguration implements ApplicationListener
throw new AlfrescoRuntimeException("Property 'configService' not set");
}
- initialised = false;
-
// Create the configuration context
ConfigLookupContext configCtx = new ConfigLookupContext(ConfigArea);
@@ -470,59 +468,106 @@ public class ServerConfiguration implements ApplicationListener
determinePlatformType();
+ // Initialize the filesystems
+
+ boolean filesysInitOK = false;
+ Config config = null;
+
try
{
-
- // Process the CIFS server configuration
-
- Config config = configService.getConfig(ConfigCIFS, configCtx);
- processCIFSServerConfig(config);
-
- // Process the FTP server configuration
-
- config = configService.getConfig(ConfigFTP, configCtx);
- processFTPServerConfig(config);
-
- // Process the security configuration
-
- config = configService.getConfig(ConfigSecurity, configCtx);
- processSecurityConfig(config);
-
// Process the filesystems configuration
config = configService.getConfig(ConfigFilesystems, configCtx);
processFilesystemsConfig(config);
-
- // Successful initialisation
- initialised = true;
+
+ // Indicate that the filesystems were initialized
+
+ filesysInitOK = true;
}
- catch (UnsatisfiedLinkError ex)
- {
- // Error accessing the Win32NetBIOS DLL code
-
- logger.error("Error accessing Win32 NetBIOS, check DLL is on the path");
-
- // Disable the CIFS server
-
- setNetBIOSSMB(false);
- setTcpipSMB(false);
- setWin32NetBIOS(false);
-
- setSMBServerEnabled(false);
- }
- catch (Throwable ex)
+ catch (Exception ex)
{
// Configuration error
logger.error("File server configuration error, " + ex.getMessage(), ex);
+ }
- // Disable the CIFS server
+ // Initialize the CIFS and FTP servers, if the filesystem(s) initialized successfully
+
+ if ( filesysInitOK == true)
+ {
+ // Initialize the CIFS server
- setNetBIOSSMB(false);
- setTcpipSMB(false);
- setWin32NetBIOS(false);
+ try
+ {
+
+ // Process the CIFS server configuration
+
+ config = configService.getConfig(ConfigCIFS, configCtx);
+ processCIFSServerConfig(config);
+
+ // Process the security configuration
+
+ config = configService.getConfig(ConfigSecurity, configCtx);
+ processSecurityConfig(config);
+
+ // Log the successful startup
+
+ logger.info("CIFS server started");
+ }
+ catch (UnsatisfiedLinkError ex)
+ {
+ // Error accessing the Win32NetBIOS DLL code
+
+ logger.error("Error accessing Win32 NetBIOS, check DLL is on the path");
+
+ // Disable the CIFS server
+
+ setNetBIOSSMB(false);
+ setTcpipSMB(false);
+ setWin32NetBIOS(false);
+
+ setSMBServerEnabled(false);
+ }
+ catch (Throwable ex)
+ {
+ // Configuration error
+
+ logger.error("CIFS server configuration error, " + ex.getMessage(), ex);
+
+ // Disable the CIFS server
+
+ setNetBIOSSMB(false);
+ setTcpipSMB(false);
+ setWin32NetBIOS(false);
+
+ setSMBServerEnabled(false);
+ }
- setSMBServerEnabled(false);
+ // Initialize the FTP server
+
+ try
+ {
+ // Process the FTP server configuration
+
+ config = configService.getConfig(ConfigFTP, configCtx);
+ processFTPServerConfig(config);
+
+ // Log the successful startup
+
+ logger.info("FTP server started");
+ }
+ catch (Exception ex)
+ {
+ // Configuration error
+
+ logger.error("FTP server configuration error, " + ex.getMessage(), ex);
+ }
+ }
+ else
+ {
+ // Log the error
+
+ logger.error("CIFS and FTP servers not started due to filesystem initialization error");
}
}
diff --git a/source/java/org/alfresco/filesys/server/filesys/DiskDeviceContext.java b/source/java/org/alfresco/filesys/server/filesys/DiskDeviceContext.java
index 6bd50144dc..a4a108db7b 100644
--- a/source/java/org/alfresco/filesys/server/filesys/DiskDeviceContext.java
+++ b/source/java/org/alfresco/filesys/server/filesys/DiskDeviceContext.java
@@ -174,7 +174,11 @@ public class DiskDeviceContext extends DeviceContext
*/
public void CloseContext()
{
-
+ // Close the notify handler
+
+ if ( hasChangeHandler())
+ getChangeHandler().shutdownRequest();
+
// Call the base class
super.CloseContext();
diff --git a/source/java/org/alfresco/filesys/server/filesys/FileSystem.java b/source/java/org/alfresco/filesys/server/filesys/FileSystem.java
index fb524ca33f..0776077075 100644
--- a/source/java/org/alfresco/filesys/server/filesys/FileSystem.java
+++ b/source/java/org/alfresco/filesys/server/filesys/FileSystem.java
@@ -28,17 +28,18 @@ public final class FileSystem
// Filesystem attributes
public static final int CaseSensitiveSearch = 0x00000001;
- public static final int CasePreservedNames = 0x00000002;
- public static final int UnicodeOnDisk = 0x00000004;
- public static final int PersistentACLs = 0x00000008;
- public static final int FileCompression = 0x00000010;
- public static final int VolumeQuotas = 0x00000020;
- public static final int SparseFiles = 0x00000040;
- public static final int ReparsePoints = 0x00000080;
- public static final int RemoteStorage = 0x00000100;
- public static final int VolumeIsCompressed = 0x00008000;
- public static final int ObjectIds = 0x00010000;
- public static final int Encryption = 0x00020000;
+ public static final int CasePreservedNames = 0x00000002;
+ public static final int UnicodeOnDisk = 0x00000004;
+ public static final int PersistentACLs = 0x00000008;
+ public static final int FileCompression = 0x00000010;
+ public static final int VolumeQuotas = 0x00000020;
+ public static final int SparseFiles = 0x00000040;
+ public static final int ReparsePoints = 0x00000080;
+ public static final int RemoteStorage = 0x00000100;
+ public static final int LFNAPISupport = 0x00004000;
+ public static final int VolumeIsCompressed = 0x00008000;
+ public static final int ObjectIds = 0x00010000;
+ public static final int Encryption = 0x00020000;
// Filesystem type strings
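The attribute constants above are single-bit flags, so a filesystem's capability set is simply their sum (equivalently, their bitwise OR) and individual capabilities are tested with a mask. A small standalone sketch, with the constant values copied from the class:

// Sketch of combining and testing the FileSystem attribute bits above.
public class FileSystemFlagsDemo
{
    // Values copied from FileSystem
    static final int CaseSensitiveSearch = 0x00000001;
    static final int CasePreservedNames  = 0x00000002;
    static final int UnicodeOnDisk       = 0x00000004;
    static final int PersistentACLs      = 0x00000008;

    public static void main(String[] args)
    {
        // Adding distinct power-of-two flags is the same as OR-ing them,
        // which is how ContentDiskDriver builds its attribute set later in this patch.
        int attributes = CasePreservedNames + UnicodeOnDisk + CaseSensitiveSearch;

        // Test individual capabilities with a bitwise AND
        System.out.println("unicode = " + ((attributes & UnicodeOnDisk) != 0));    // true
        System.out.println("acls    = " + ((attributes & PersistentACLs) != 0));   // false
    }
}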
diff --git a/source/java/org/alfresco/filesys/smb/mailslot/HostAnnouncer.java b/source/java/org/alfresco/filesys/smb/mailslot/HostAnnouncer.java
index aa832c43da..415b4df160 100644
--- a/source/java/org/alfresco/filesys/smb/mailslot/HostAnnouncer.java
+++ b/source/java/org/alfresco/filesys/smb/mailslot/HostAnnouncer.java
@@ -75,6 +75,10 @@ public abstract class HostAnnouncer extends Thread
private byte m_updateCount;
+ // Error count
+
+ private int m_errorCount;
+
// Shutdown flag, host announcer should remove the announced name as it shuts down
private boolean m_shutdown = false;
@@ -156,6 +160,16 @@ public abstract class HostAnnouncer extends Thread
return m_names.numberOfStrings();
}
+ /**
+ * Return the error count
+ *
+ * @return int
+ */
+ protected final int getErrorCount()
+ {
+ return m_errorCount;
+ }
+
/**
* Return the specified host name being announced.
*
@@ -493,6 +507,24 @@ public abstract class HostAnnouncer extends Thread
m_srvtype = typ;
}
+ /**
+ * Increment the error count
+ *
+ * @return int
+ */
+ protected final int incrementErrorCount()
+ {
+ return ++m_errorCount;
+ }
+
+ /**
+ * Clear the error count
+ */
+ protected final void clearErrorCount()
+ {
+ m_errorCount = 0;
+ }
+
/**
* Shutdown the host announcer and remove the announced name from Network Neighborhood.
*/
diff --git a/source/java/org/alfresco/filesys/smb/mailslot/Win32NetBIOSHostAnnouncer.java b/source/java/org/alfresco/filesys/smb/mailslot/Win32NetBIOSHostAnnouncer.java
index 8a681ca48f..b955c7191c 100644
--- a/source/java/org/alfresco/filesys/smb/mailslot/Win32NetBIOSHostAnnouncer.java
+++ b/source/java/org/alfresco/filesys/smb/mailslot/Win32NetBIOSHostAnnouncer.java
@@ -32,6 +32,10 @@ import org.alfresco.filesys.smb.server.win32.Win32NetBIOSSessionSocketHandler;
public class Win32NetBIOSHostAnnouncer extends HostAnnouncer
{
+ // Number of send errors before marking the LANA as offline
+
+ private static final int SendErrorCount = 3;
+
// Associated session handler
Win32NetBIOSSessionSocketHandler m_handler;
@@ -120,6 +124,36 @@ public class Win32NetBIOSHostAnnouncer extends HostAnnouncer
int sts = Win32NetBIOS.SendDatagram(getLana(), getNameNumber(), destName, buf, 0, len);
if ( sts != NetBIOS.NRC_GoodRet)
- logger.debug("Win32NetBIOS host announce error " + NetBIOS.getErrorString( -sts));
+ {
+ // Log the error
+
+ if ( logger.isErrorEnabled())
+ logger.error("Host announce error " + NetBIOS.getErrorString( -sts) +
+ " (LANA " + getLana() + ")");
+
+ // Update the error count
+
+ if ( incrementErrorCount() == SendErrorCount)
+ {
+ // Mark the LANA as offline
+
+ m_handler.lanaStatusChange( getLana(), false);
+
+ // Clear the error count
+
+ clearErrorCount();
+
+ // Log the error
+
+ if ( logger.isErrorEnabled())
+ logger.error("Marked LANA as unavailable due to send errors");
+ }
+ }
+ else
+ {
+ // Clear the error count
+
+ clearErrorCount();
+ }
}
}
\ No newline at end of file
diff --git a/source/java/org/alfresco/filesys/smb/mailslot/WinsockNetBIOSHostAnnouncer.java b/source/java/org/alfresco/filesys/smb/mailslot/WinsockNetBIOSHostAnnouncer.java
index b51028aaa9..147a314911 100644
--- a/source/java/org/alfresco/filesys/smb/mailslot/WinsockNetBIOSHostAnnouncer.java
+++ b/source/java/org/alfresco/filesys/smb/mailslot/WinsockNetBIOSHostAnnouncer.java
@@ -16,6 +16,8 @@
*/
package org.alfresco.filesys.smb.mailslot;
+import java.io.IOException;
+
import org.alfresco.filesys.netbios.NetBIOSName;
import org.alfresco.filesys.netbios.win32.NetBIOS;
import org.alfresco.filesys.netbios.win32.NetBIOSSocket;
@@ -34,6 +36,10 @@ import org.alfresco.filesys.smb.server.win32.Win32NetBIOSSessionSocketHandler;
*/
public class WinsockNetBIOSHostAnnouncer extends HostAnnouncer
{
+ // Number of send errors before marking the LANA as offline
+
+ private static final int SendErrorCount = 3;
+
// Associated session handler
private Win32NetBIOSSessionSocketHandler m_handler;
@@ -116,8 +122,49 @@ public class WinsockNetBIOSHostAnnouncer extends HostAnnouncer
// Send the host announce datagram via the Win32 Netbios() API call
- int sts = m_dgramSocket.sendDatagram(destNbName, buf, 0, len);
- if ( sts != len)
- logger.debug("WinsockNetBIOS host announce error");
+ boolean txOK = false;
+
+ try
+ {
+ int sts = m_dgramSocket.sendDatagram(destNbName, buf, 0, len);
+ if ( sts == len)
+ txOK = true;
+ }
+ catch ( IOException ex)
+ {
+ // Log the error
+
+ if ( logger.isErrorEnabled())
+ logger.error("Host announce error, " + ex.getMessage() + ", (LANA " + getLana() + ")");
+ }
+
+ // Check if the send was successful
+
+ if ( txOK == false)
+ {
+ // Update the error count
+
+ if ( incrementErrorCount() == SendErrorCount)
+ {
+ // Mark the LANA as offline
+
+ m_handler.lanaStatusChange( getLana(), false);
+
+ // Clear the error count
+
+ clearErrorCount();
+
+ // Log the error
+
+ if ( logger.isErrorEnabled())
+ logger.error("Marked LANA as unavailable due to send errors, (LANA " + getLana() + ")");
+ }
+ }
+ else
+ {
+ // Clear the error count
+
+ clearErrorCount();
+ }
}
}
\ No newline at end of file
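Both announcer changes above apply the same consecutive-failure rule: count send errors, take the LANA offline when the count reaches SendErrorCount, and clear the count on any successful send (or after marking the LANA offline). The rule in isolation, with illustrative names:

// Standalone sketch of the consecutive-error rule used by the host announcers.
public class SendErrorTracker
{
    private final int threshold;   // e.g. 3, matching SendErrorCount above
    private int errorCount;

    public SendErrorTracker(int threshold)
    {
        this.threshold = threshold;
    }

    // Record one send attempt; returns true when the resource should be marked offline.
    public boolean recordSend(boolean success)
    {
        if (success)
        {
            errorCount = 0;            // any success clears the run of errors
            return false;
        }
        if (++errorCount >= threshold)
        {
            errorCount = 0;            // cleared again after the offline transition
            return true;
        }
        return false;
    }
}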
diff --git a/source/java/org/alfresco/filesys/smb/server/FindInfoPacker.java b/source/java/org/alfresco/filesys/smb/server/FindInfoPacker.java
index eb0045f03e..35febce405 100644
--- a/source/java/org/alfresco/filesys/smb/server/FindInfoPacker.java
+++ b/source/java/org/alfresco/filesys/smb/server/FindInfoPacker.java
@@ -40,24 +40,24 @@ class FindInfoPacker
// File information levels
- public static final int InfoStandard = 1;
- public static final int InfoQueryEASize = 2;
- public static final int InfoQueryEAFromList = 3;
- public static final int InfoDirectory = 0x101;
- public static final int InfoFullDirectory = 0x102;
- public static final int InfoNames = 0x103;
- public static final int InfoDirectoryBoth = 0x104;
- public static final int InfoMacHfsInfo = 0x302;
+ public static final int InfoStandard = 1;
+ public static final int InfoQueryEASize = 2;
+ public static final int InfoQueryEAFromList = 3;
+ public static final int InfoDirectory = 0x101;
+ public static final int InfoFullDirectory = 0x102;
+ public static final int InfoNames = 0x103;
+ public static final int InfoDirectoryBoth = 0x104;
+ public static final int InfoMacHfsInfo = 0x302;
// File information fixed lengths, includes nulls on strings.
- public static final int InfoStandardLen = 24;
- public static final int InfoQueryEASizeLen = 28;
- public static final int InfoDirectoryLen = 64;
- public static final int InfoFullDirectoryLen = 68;
- public static final int InfoNamesLen = 12;
- public static final int InfoDirectoryBothLen = 94;
- public static final int InfoMacHfsLen = 120;
+ public static final int InfoStandardLen = 24;
+ public static final int InfoQueryEASizeLen = 28;
+ public static final int InfoDirectoryLen = 64;
+ public static final int InfoFullDirectoryLen = 68;
+ public static final int InfoNamesLen = 12;
+ public static final int InfoDirectoryBothLen = 94;
+ public static final int InfoMacHfsLen = 120;
/**
* Pack a file information object into the specified buffer, using information level 1 format.
@@ -426,7 +426,7 @@ class FindInfoPacker
// Align the buffer pointer and set the offset to the next file information entry
- buf.longwordAlign();
+ buf.wordAlign();
int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -518,7 +518,7 @@ class FindInfoPacker
// Align the buffer pointer and set the offset to the next file information entry
- buf.longwordAlign();
+ buf.wordAlign();
int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -615,7 +615,7 @@ class FindInfoPacker
// Align the buffer pointer and set the offset to the next file information entry
- buf.longwordAlign();
+ buf.wordAlign();
int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -718,7 +718,7 @@ class FindInfoPacker
// Align the buffer pointer and set the offset to the next file information entry
- buf.longwordAlign();
+ buf.wordAlign();
int curPos = buf.getPosition();
buf.setPosition(startPos);
@@ -839,7 +839,7 @@ class FindInfoPacker
// Align the buffer pointer and set the offset to the next file information entry
- buf.longwordAlign();
+ buf.wordAlign();
int curPos = buf.getPosition();
buf.setPosition(startPos);
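The switch from longwordAlign() to wordAlign() pads each directory entry to a 2-byte boundary instead of a 4-byte one. The standard rounding arithmetic, shown standalone (the DataBuffer method names are reused here only as labels):

// Rounding a buffer offset up to 2-byte (word) and 4-byte (longword) boundaries.
public class AlignDemo
{
    static int wordAlign(int pos)     { return (pos + 1) & ~1; }
    static int longwordAlign(int pos) { return (pos + 3) & ~3; }

    public static void main(String[] args)
    {
        for (int pos : new int[] { 93, 94, 95, 96 })
        {
            System.out.println(pos + " -> word " + wordAlign(pos)
                    + ", longword " + longwordAlign(pos));
        }
        // 93 -> word 94, longword 96: word alignment packs the
        // next-entry offsets two bytes closer together.
    }
}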
diff --git a/source/java/org/alfresco/filesys/smb/server/NTProtocolHandler.java b/source/java/org/alfresco/filesys/smb/server/NTProtocolHandler.java
index 33f53eb4ad..c8a6022dee 100644
--- a/source/java/org/alfresco/filesys/smb/server/NTProtocolHandler.java
+++ b/source/java/org/alfresco/filesys/smb/server/NTProtocolHandler.java
@@ -6576,41 +6576,12 @@ public class NTProtocolHandler extends CoreProtocolHandler
return;
}
- // Check if this is a buffer length check, if so the maximum returned data count will be
- // zero
+ // Return an empty security descriptor
+
+ byte[] paramblk = new byte[4];
+ DataPacker.putIntelInt(0, paramblk, 0);
- if (tbuf.getReturnDataLimit() == 0)
- {
-
- // Return the security descriptor length in the parameter block
-
- byte[] paramblk = new byte[4];
- DataPacker.putIntelInt(_sdEveryOne.length, paramblk, 0);
-
- // Initialize the transaction reply
-
- outPkt.initTransactReply(paramblk, paramblk.length, null, 0);
-
- // Set a warning status to indicate the supplied data buffer was too small to return the
- // security
- // descriptor
-
- outPkt.setLongErrorCode(SMBStatus.NTBufferTooSmall);
- }
- else
- {
-
- // Return the security descriptor length in the parameter block
-
- byte[] paramblk = new byte[4];
- DataPacker.putIntelInt(_sdEveryOne.length, paramblk, 0);
-
- // Initialize the transaction reply. Return the fixed security descriptor that allows
- // anyone to access the
- // file/directory
-
- outPkt.initTransactReply(paramblk, paramblk.length, _sdEveryOne, _sdEveryOne.length);
- }
+ outPkt.initTransactReply(paramblk, paramblk.length, null, 0);
// Send back the response
diff --git a/source/java/org/alfresco/filesys/smb/server/SMBServer.java b/source/java/org/alfresco/filesys/smb/server/SMBServer.java
index 60644ee9a2..ce08072aec 100644
--- a/source/java/org/alfresco/filesys/smb/server/SMBServer.java
+++ b/source/java/org/alfresco/filesys/smb/server/SMBServer.java
@@ -78,7 +78,7 @@ public class SMBServer extends NetworkFileServer implements Runnable
// Server type flags, used when announcing the host
- private int m_srvType = ServerType.WorkStation + ServerType.Server;
+ private int m_srvType = ServerType.WorkStation + ServerType.Server + ServerType.NTServer;
// Next available session id
diff --git a/source/java/org/alfresco/filesys/smb/server/repo/ContentDiskDriver.java b/source/java/org/alfresco/filesys/smb/server/repo/ContentDiskDriver.java
index 954b82d55a..0d57ee05c3 100644
--- a/source/java/org/alfresco/filesys/smb/server/repo/ContentDiskDriver.java
+++ b/source/java/org/alfresco/filesys/smb/server/repo/ContentDiskDriver.java
@@ -311,7 +311,8 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface
// Set parameters
- context.setFilesystemAttributes(FileSystem.CasePreservedNames);
+ context.setFilesystemAttributes(FileSystem.CasePreservedNames + FileSystem.UnicodeOnDisk +
+ FileSystem.CaseSensitiveSearch);
}
catch (Exception ex)
{
@@ -597,10 +598,8 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface
{
// a valid use case
if (logger.isDebugEnabled())
- {
logger.debug("Getting file information - File not found: \n" +
" path: " + path);
- }
throw e;
}
catch (org.alfresco.repo.security.permissions.AccessDeniedException ex)
@@ -1448,7 +1447,8 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface
try
{
- // get the node
+ // Get the node
+
NodeRef nodeRef = getNodeForPath(tree, name);
if (nodeService.exists(nodeRef))
{
@@ -1468,15 +1468,6 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface
" node: " + nodeRef);
}
}
- catch (FileNotFoundException e)
- {
- // already gone
- if (logger.isDebugEnabled())
- {
- logger.debug("Deleted file : \n" +
- " file: " + name);
- }
- }
catch (NodeLockedException ex)
{
// Debug
@@ -1610,8 +1601,11 @@ public class ContentDiskDriver implements DiskInterface, IOCtlInterface
// DEBUG
- if ( logger.isDebugEnabled())
+ if ( logger.isDebugEnabled())
+ {
logger.debug("Cached rename state for " + oldName + ", state=" + fstate);
+ logger.debug(" new name " + newName + ", state=" + newState);
+ }
}
}
else
diff --git a/source/java/org/alfresco/filesys/smb/server/repo/FileState.java b/source/java/org/alfresco/filesys/smb/server/repo/FileState.java
index 1a5844df79..dc87680438 100644
--- a/source/java/org/alfresco/filesys/smb/server/repo/FileState.java
+++ b/source/java/org/alfresco/filesys/smb/server/repo/FileState.java
@@ -20,7 +20,6 @@ import org.alfresco.filesys.locking.FileLock;
import org.alfresco.filesys.locking.FileLockList;
import org.alfresco.filesys.locking.LockConflictException;
import org.alfresco.filesys.locking.NotLockedException;
-import org.alfresco.filesys.server.filesys.FileName;
import org.alfresco.filesys.server.filesys.FileOpenParams;
import org.alfresco.filesys.server.filesys.FileStatus;
import org.alfresco.filesys.smb.SharingMode;
@@ -587,35 +586,7 @@ public class FileState
*/
public final static String normalizePath(String path)
{
-
- // Split the path into directories and file name, only uppercase the directories to
- // normalize the path.
-
- String normPath = path;
-
- if (path.length() > 3)
- {
-
- // Split the path to seperate the folders/file name
-
- int pos = path.lastIndexOf(FileName.DOS_SEPERATOR);
- if (pos != -1)
- {
-
- // Get the path and file name parts, normalize the path
-
- String pathPart = path.substring(0, pos).toUpperCase();
- String namePart = path.substring(pos);
-
- // Rebuild the path string
-
- normPath = pathPart + namePart;
- }
- }
-
- // Return the normalized path
-
- return normPath;
+ return path.toUpperCase();
}
/**
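normalizePath() now uppercases the whole path rather than only the folder portion, so file-state cache keys that differ only in file-name case collapse to a single entry. Worked by hand from the two implementations:

// Effect of the normalizePath() change on a sample cache key (worked by hand).
public class NormalizePathDemo
{
    public static void main(String[] args)
    {
        String path = "\\Sales\\Report.DOC";

        // Old behaviour: only the folder portion was uppercased -> \SALES\Report.DOC
        // New behaviour: the whole path is uppercased
        System.out.println(path.toUpperCase());   // \SALES\REPORT.DOC
    }
}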
diff --git a/source/java/org/alfresco/filesys/smb/server/repo/FileStateTable.java b/source/java/org/alfresco/filesys/smb/server/repo/FileStateTable.java
index 835fb04e03..2eb2797979 100644
--- a/source/java/org/alfresco/filesys/smb/server/repo/FileStateTable.java
+++ b/source/java/org/alfresco/filesys/smb/server/repo/FileStateTable.java
@@ -448,7 +448,7 @@ public class FileStateTable implements Runnable
// Dump the file state cache entries to the specified stream
if (m_stateTable.size() > 0)
- logger.info("++ FileStateCache Entries:");
+ logger.debug("++ FileStateCache Entries:");
Enumeration enm = m_stateTable.keys();
long curTime = System.currentTimeMillis();
@@ -458,7 +458,7 @@ public class FileStateTable implements Runnable
String fname = (String) enm.nextElement();
FileState state = m_stateTable.get(fname);
- logger.info(" ++ " + fname + "(" + state.getSecondsToExpire(curTime) + ") : " + state);
+ logger.debug(" ++ " + fname + "(" + state.getSecondsToExpire(curTime) + ") : " + state);
}
}
}
\ No newline at end of file
diff --git a/source/java/org/alfresco/filesys/smb/server/win32/Win32NetBIOSLanaMonitor.java b/source/java/org/alfresco/filesys/smb/server/win32/Win32NetBIOSLanaMonitor.java
index e7cec88150..bd9f9d90d4 100644
--- a/source/java/org/alfresco/filesys/smb/server/win32/Win32NetBIOSLanaMonitor.java
+++ b/source/java/org/alfresco/filesys/smb/server/win32/Win32NetBIOSLanaMonitor.java
@@ -39,7 +39,7 @@ public class Win32NetBIOSLanaMonitor extends Thread
//
// Initial LANA listener array size
- private static final int LanaListenerArraySize = 16;
+ private static final int LanaListenerArraySize = 256;
// Debug logging
@@ -153,24 +153,7 @@ public class Win32NetBIOSLanaMonitor extends Thread
// Check if the listener array has been allocated
if ( m_listeners == null)
- {
- int len = LanaListenerArraySize;
- if ( lana > len)
- len = (lana + 3) & 0x00FC;
-
- m_listeners = new LanaListener[len];
- }
- else if ( lana >= m_listeners.length)
- {
- // Extend the LANA listener array
-
- LanaListener[] newArray = new LanaListener[(lana + 3) & 0x00FC];
-
- // Copy the existing array to the extended array
-
- System.arraycopy(m_listeners, 0, newArray, 0, m_listeners.length);
- m_listeners = newArray;
- }
+ m_listeners = new LanaListener[LanaListenerArraySize];
// Add the LANA listener
@@ -343,6 +326,10 @@ public class Win32NetBIOSLanaMonitor extends Thread
m_lanas.set(lana);
m_lanaSts.set(lana, true);
+
+ // Add a listener for the new LANA
+
+ addLanaListener( sessHandler.getLANANumber(), sessHandler);
}
}
else
diff --git a/source/java/org/alfresco/jcr/session/SessionImpl.java b/source/java/org/alfresco/jcr/session/SessionImpl.java
index b772e02d6a..ef4dd0d3f8 100644
--- a/source/java/org/alfresco/jcr/session/SessionImpl.java
+++ b/source/java/org/alfresco/jcr/session/SessionImpl.java
@@ -645,19 +645,29 @@ public class SessionImpl implements Session
if (isLive())
{
// invalidate authentication
- getRepositoryImpl().getServiceRegistry().getAuthenticationService().invalidateTicket(getTicket());
- ticket = null;
-
- // clean up resources
try
{
- sessionIsolation.rollback();
+ try
+ {
+ getRepositoryImpl().getServiceRegistry().getAuthenticationService().invalidateTicket(getTicket());
+ }
+ finally
+ {
+ try
+ {
+ sessionIsolation.rollback();
+ }
+ catch(RepositoryException e)
+ {
+ // continue execution and force logout
+ }
+ }
}
- catch(RepositoryException e)
+ finally
{
- // force logout
+ ticket = null;
+ repository.deregisterSession();
}
- repository.deregisterSession();
}
}
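The reworked logout() nests try/finally blocks so that the ticket is always cleared and the session deregistered even when ticket invalidation or the isolation rollback throws. The same control flow in isolation, with placeholder step names:

// Shape of the nested cleanup in SessionImpl.logout(); step bodies are placeholders.
public class LogoutShapeDemo
{
    static void invalidateTicket()  { /* may throw */ }
    static void rollbackIsolation() { /* may throw */ }
    static void clearTicket()       { System.out.println("ticket cleared"); }
    static void deregisterSession() { System.out.println("session deregistered"); }

    public static void main(String[] args)
    {
        try
        {
            try
            {
                invalidateTicket();
            }
            finally
            {
                try
                {
                    rollbackIsolation();
                }
                catch (Exception e)
                {
                    // continue execution and force logout
                }
            }
        }
        finally
        {
            // always runs, regardless of the exceptions above
            clearTicket();
            deregisterSession();
        }
    }
}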
diff --git a/source/java/org/alfresco/jcr/session/SessionImplTest.java b/source/java/org/alfresco/jcr/session/SessionImplTest.java
index 749bf13f3f..c0ea777968 100644
--- a/source/java/org/alfresco/jcr/session/SessionImplTest.java
+++ b/source/java/org/alfresco/jcr/session/SessionImplTest.java
@@ -94,5 +94,29 @@ public class SessionImplTest extends BaseJCRTest
assertFalse(isLive);
}
+
+ public void testSessionThread()
+ {
+ SimpleCredentials superuser = new SimpleCredentials("superuser", "".toCharArray());
+ try
+ {
+ Session anotherSession = repository.login(superuser, getWorkspace());
+ fail("Exception not thrown when establishing two sessions on same thread");
+ }
+ catch(RepositoryException e)
+ {
+ // successful - multiple sessions on one thread caught
+ }
+ superuserSession.logout();
+ try
+ {
+ Session anotherSession = repository.login(superuser, getWorkspace());
+ anotherSession.logout();
+ }
+ catch(RepositoryException e)
+ {
+ fail("Exception thrown when it shouldn't of been.");
+ }
+ }
+
}
-
diff --git a/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java b/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java
index 8400ef1bbf..4ddf81e03d 100644
--- a/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java
+++ b/source/java/org/alfresco/repo/action/executer/TransformActionExecuter.java
@@ -18,7 +18,6 @@ package org.alfresco.repo.action.executer;
import java.util.List;
-import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ParameterDefinitionImpl;
import org.alfresco.service.cmr.action.Action;
@@ -168,6 +167,10 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
overwrite = overwriteValue.booleanValue();
}
+ // Calculate the destination name
+ String originalName = (String)nodeService.getProperty(actionedUponNodeRef, ContentModel.PROP_NAME);
+ String newName = transformName(originalName, mimeType);
+
// Since we are overwriting we need to figure out whether the destination node exists
NodeRef copyNodeRef = null;
if (overwrite == true)
@@ -182,9 +185,10 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
if (this.nodeService.hasAspect(copy, ContentModel.ASPECT_WORKING_COPY) == false)
{
// We can assume that we are looking for a node created by this action so the primary parent will
- // match the destination folder
+ // match the destination folder and the name will be the same
NodeRef parent = this.nodeService.getPrimaryParent(copy).getParentRef();
- if (parent.equals(destinationParent) == true)
+ String copyName = (String)this.nodeService.getProperty(copy, ContentModel.PROP_NAME);
+ if (parent.equals(destinationParent) == true && copyName.equals(newName) == true)
{
if (copyNodeRef == null)
{
@@ -212,58 +216,46 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
destinationAssocQName,
false);
newCopy = true;
- }
+ }
- // Get the content reader
- ContentReader contentReader = this.contentService.getReader(actionedUponNodeRef, ContentModel.PROP_CONTENT);
- if (contentReader == null)
- {
- // for some reason, this action is premature
- throw new AlfrescoRuntimeException(
- "Attempting to execute content transformation rule " +
- "but content has not finished writing, i.e. no URL is available.");
- }
- String originalMimetype = contentReader.getMimetype();
-
- // get the writer and set it up
- ContentWriter contentWriter = this.contentService.getWriter(copyNodeRef, ContentModel.PROP_CONTENT, true);
- contentWriter.setMimetype(mimeType); // new mimetype
- contentWriter.setEncoding(contentReader.getEncoding()); // original encoding
-
if (newCopy == true)
{
// Adjust the name of the copy
- String originalName = (String)nodeService.getProperty(actionedUponNodeRef, ContentModel.PROP_NAME);
- String newName = transformName(originalName, originalMimetype, mimeType);
nodeService.setProperty(copyNodeRef, ContentModel.PROP_NAME, newName);
String originalTitle = (String)nodeService.getProperty(actionedUponNodeRef, ContentModel.PROP_TITLE);
if (originalTitle != null && originalTitle.length() > 0)
{
- String newTitle = transformName(originalTitle, originalMimetype, mimeType);
+ String newTitle = transformName(originalTitle, mimeType);
nodeService.setProperty(copyNodeRef, ContentModel.PROP_TITLE, newTitle);
}
}
-
- // Try and transform the content
- try
+
+ // Get the content reader
+ ContentReader contentReader = this.contentService.getReader(actionedUponNodeRef, ContentModel.PROP_CONTENT);
+ // Only do the transformation if some content is available
+ if (contentReader != null)
{
- doTransform(ruleAction, contentReader, contentWriter);
- }
- catch(NoTransformerException e)
- {
- if (logger.isDebugEnabled())
+ // get the writer and set it up
+ ContentWriter contentWriter = this.contentService.getWriter(copyNodeRef, ContentModel.PROP_CONTENT, true);
+ contentWriter.setMimetype(mimeType); // new mimetype
+ contentWriter.setEncoding(contentReader.getEncoding()); // original encoding
+
+ // Try and transform the content
+ try
{
- logger.debug("No transformer found to execute rule: \n" +
- " reader: " + contentReader + "\n" +
- " writer: " + contentWriter + "\n" +
- " action: " + this);
+ doTransform(ruleAction, contentReader, contentWriter);
}
- //if (newCopy == true)
- //{
- // TODO: Revisit this for alternative solutions
- // nodeService.deleteNode(copyNodeRef);
- // }
- }
+ catch(NoTransformerException e)
+ {
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("No transformer found to execute rule: \n" +
+ " reader: " + contentReader + "\n" +
+ " writer: " + contentWriter + "\n" +
+ " action: " + this);
+ }
+ }
+ }
}
protected void doTransform(Action ruleAction, ContentReader contentReader, ContentWriter contentWriter)
@@ -279,7 +271,7 @@ public class TransformActionExecuter extends ActionExecuterAbstractBase
* @param newMimetype
* @return
*/
- private String transformName(String original, String originalMimetype, String newMimetype)
+ private String transformName(String original, String newMimetype)
{
// get the current extension
int dotIndex = original.lastIndexOf('.');
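transformName() now takes only the original name and the target mimetype, resolves that mimetype's default extension via MimetypeService, and replaces the current extension. A simplified sketch of the renaming step; the behaviour when no extension is present is an assumption, and the pdf/txt mappings are illustrative:

// Simplified sketch of the extension swap done by transformName().
public class TransformNameDemo
{
    static String transformName(String original, String newExtension)
    {
        int dotIndex = original.lastIndexOf('.');
        if (dotIndex != -1)
        {
            // replace the existing extension
            return original.substring(0, dotIndex + 1) + newExtension;
        }
        // no extension present, so append one (assumed behaviour)
        return original + "." + newExtension;
    }

    public static void main(String[] args)
    {
        System.out.println(transformName("report.doc", "pdf"));   // report.pdf
        System.out.println(transformName("README", "txt"));       // README.txt
    }
}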
diff --git a/source/java/org/alfresco/repo/admin/patch/impl/TopLevelGroupParentChildAssociationTypePatch.java b/source/java/org/alfresco/repo/admin/patch/impl/TopLevelGroupParentChildAssociationTypePatch.java
index 786c713334..6a552121ca 100644
--- a/source/java/org/alfresco/repo/admin/patch/impl/TopLevelGroupParentChildAssociationTypePatch.java
+++ b/source/java/org/alfresco/repo/admin/patch/impl/TopLevelGroupParentChildAssociationTypePatch.java
@@ -25,14 +25,14 @@ import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.security.authority.AuthorityDAOImpl;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
-import org.alfresco.service.cmr.repository.NodeService;
-import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
{
private static final String MSG_RESULT = "patch.topLevelGroupParentChildAssociationTypePatch.result";
+ private static final String ERR_SYS_PATH_NOT_FOUND = "patch.topLevelGroupParentChildAssociationTypePatch.err.sys_path_not_found";
+ private static final String ERR_AUTH_PATH_NOT_FOUND = "patch.topLevelGroupParentChildAssociationTypePatch.err.auth_path_not_found";
public TopLevelGroupParentChildAssociationTypePatch()
{
@@ -48,12 +48,15 @@ public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
{
if (!car.getTypeQName().equals(ContentModel.ASSOC_CHILDREN))
{
- nodeService
- .moveNode(car.getChildRef(), car.getParentRef(), ContentModel.ASSOC_CHILDREN, car.getQName());
+ nodeService.moveNode(
+ car.getChildRef(),
+ car.getParentRef(),
+ ContentModel.ASSOC_CHILDREN,
+ car.getQName());
}
}
- return I18NUtil.getMessage(MSG_RESULT);
+ return I18NUtil.getMessage(MSG_RESULT, results.size());
}
private NodeRef getAuthorityContainer()
@@ -68,7 +71,7 @@ public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
NodeRef sysNodeRef = null;
if (results.size() == 0)
{
- throw new AlfrescoRuntimeException("Required authority system path not found: " + qnameAssocSystem);
+ throw new AlfrescoRuntimeException(ERR_SYS_PATH_NOT_FOUND, new Object[] {qnameAssocSystem});
}
else
{
@@ -78,7 +81,7 @@ public class TopLevelGroupParentChildAssociationTypePatch extends AbstractPatch
NodeRef authNodeRef = null;
if (results.size() == 0)
{
- throw new AlfrescoRuntimeException("Required authority path not found: " + qnameAssocAuthorities);
+ throw new AlfrescoRuntimeException(ERR_AUTH_PATH_NOT_FOUND, new Object[] {qnameAssocAuthorities});
}
else
{
diff --git a/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java b/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java
index 06cad43364..ccfb32b96c 100644
--- a/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java
+++ b/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java
@@ -140,7 +140,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
}
/**
- * Sets the authenticatin service
+ * Sets the authentication service
*
* @param authenticationService the authentication service
*/
@@ -244,7 +244,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
workingCopyProperties.put(ContentModel.PROP_WORKING_COPY_OWNER, userName);
this.nodeService.addAspect(workingCopy, ContentModel.ASPECT_WORKING_COPY, workingCopyProperties);
- // Lock the origional node
+ // Lock the original node
this.lockService.lock(nodeRef, LockType.READ_ONLY_LOCK);
// Return the working copy
@@ -307,11 +307,11 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
try
{
Map<QName, Serializable> workingCopyProperties = nodeService.getProperties(workingCopyNodeRef);
- // Try and get the origional node reference
+ // Try and get the original node reference
nodeRef = (NodeRef) workingCopyProperties.get(ContentModel.PROP_COPY_REFERENCE);
if(nodeRef == null)
{
- // Error since the origional node can not be found
+ // Error since the original node can not be found
throw new CheckOutCheckInServiceException(MSG_ERR_BAD_COPY);
}
@@ -347,7 +347,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
contentData);
}
- // Copy the contents of the working copy onto the origional
+ // Copy the contents of the working copy onto the original
this.copyService.copy(workingCopyNodeRef, nodeRef);
if (versionProperties != null && this.nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true)
@@ -364,7 +364,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
}
else
{
- // Re-lock the origional node
+ // Re-lock the original node
this.lockService.lock(nodeRef, LockType.READ_ONLY_LOCK);
}
}
@@ -421,15 +421,15 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
// Ensure that the node has the copy aspect
if (this.nodeService.hasAspect(workingCopyNodeRef, ContentModel.ASPECT_COPIEDFROM) == true)
{
- // Get the origional node
+ // Get the original node
nodeRef = (NodeRef)this.nodeService.getProperty(workingCopyNodeRef, ContentModel.PROP_COPY_REFERENCE);
if (nodeRef == null)
{
- // Error since the origional node can not be found
+ // Error since the original node can not be found
throw new CheckOutCheckInServiceException(MSG_ERR_BAD_COPY);
}
- // Release the lock on the origional node
+ // Release the lock on the original node
this.lockService.unlock(nodeRef);
// Delete the working copy
@@ -452,7 +452,7 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
{
NodeRef workingCopy = null;
- // Do a search to find the origional document
+ // Do a search to find the working copy document
ResultSet resultSet = null;
try
{
diff --git a/source/java/org/alfresco/repo/content/MimetypeMap.java b/source/java/org/alfresco/repo/content/MimetypeMap.java
index 971ea17542..08b9d49f24 100644
--- a/source/java/org/alfresco/repo/content/MimetypeMap.java
+++ b/source/java/org/alfresco/repo/content/MimetypeMap.java
@@ -40,6 +40,8 @@ import org.apache.commons.logging.LogFactory;
*/
public class MimetypeMap implements MimetypeService
{
+ public static final String EXTENSION_BINARY = "bin";
+
public static final String MIMETYPE_TEXT_PLAIN = "text/plain";
public static final String MIMETYPE_TEXT_CSS = "text/css";
public static final String MIMETYPE_XML = "text/xml";
@@ -87,6 +89,8 @@ public class MimetypeMap implements MimetypeService
public static final String MIMETYPE_STAROFFICE5_WRITER = "application/vnd.stardivision.writer";
public static final String MIMETYPE_STAROFFICE5_WRITER_GLOBAL = "application/vnd.stardivision.writer-global";
public static final String MIMETYPE_STAROFFICE5_MATH = "application/vnd.stardivision.math";
+ // WordPerfect
+ public static final String MIMETYPE_WORDPERFECT = "application/wordperfect";
// Audio
public static final String MIMETYPE_MP3 = "audio/x-mpeg";
// Alfresco
@@ -207,18 +211,26 @@ public class MimetypeMap implements MimetypeService
}
/**
+ * Get the file extension associated with the mimetype.
+ *
* @param mimetype a valid mimetype
- * @return Returns the default extension for the mimetype
- * @throws AlfrescoRuntimeException if the mimetype doesn't exist
+ * @return Returns the default extension for the mimetype, or the {@link #MIMETYPE_BINARY binary}
+ * mimetype extension if the mimetype is not registered.
+ *
+ * @see #MIMETYPE_BINARY
+ * @see #EXTENSION_BINARY
*/
public String getExtension(String mimetype)
{
String extension = extensionsByMimetype.get(mimetype);
if (extension == null)
{
- throw new AlfrescoRuntimeException("No extension available for mimetype: " + mimetype);
+ return EXTENSION_BINARY;
+ }
+ else
+ {
+ return extension;
}
- return extension;
}
public Map<String, String> getDisplaysByExtension()
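After this change getExtension() never throws for an unknown mimetype; it falls back to the generic bin extension. A self-contained mirror of the new fallback (the text/plain to txt mapping is a sample, not taken from the patch):

import java.util.HashMap;
import java.util.Map;

// Mirrors the new fallback in MimetypeMap.getExtension(): unknown mimetypes
// resolve to the generic "bin" extension instead of raising an exception.
public class ExtensionFallbackDemo
{
    private static final Map<String, String> extensionsByMimetype = new HashMap<String, String>();
    static
    {
        extensionsByMimetype.put("text/plain", "txt");   // sample mapping only
    }

    static String getExtension(String mimetype)
    {
        String extension = extensionsByMimetype.get(mimetype);
        return extension == null ? "bin" : extension;
    }

    public static void main(String[] args)
    {
        System.out.println(getExtension("text/plain"));              // txt
        System.out.println(getExtension("application/x-unknown"));   // bin
    }
}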
diff --git a/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracter.java b/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracter.java
index 3a8b8d4f3e..50548b8089 100644
--- a/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracter.java
+++ b/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -31,11 +31,11 @@ import org.apache.commons.logging.LogFactory;
/**
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
abstract public class AbstractMetadataExtracter implements MetadataExtracter
{
- private static Log logger = LogFactory.getLog(AbstractMetadataExtracter.class);
+ protected static Log logger = LogFactory.getLog(AbstractMetadataExtracter.class);
private MimetypeService mimetypeService;
private MetadataExtracterRegistry registry;
diff --git a/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracterTest.java b/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracterTest.java
index 51ead94f88..bbb17d5153 100644
--- a/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracterTest.java
+++ b/source/java/org/alfresco/repo/content/metadata/AbstractMetadataExtracterTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -38,7 +38,7 @@ import org.springframework.context.ApplicationContext;
* @see org.alfresco.repo.content.metadata.MetadataExtracter
* @see org.alfresco.repo.content.metadata.AbstractMetadataExtracter
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public abstract class AbstractMetadataExtracterTest extends TestCase
{
diff --git a/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracter.java b/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracter.java
index 63b731e3c2..d8c4657c50 100644
--- a/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracter.java
+++ b/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -38,7 +38,7 @@ import org.alfresco.service.namespace.QName;
/**
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public class HtmlMetadataExtracter extends AbstractMetadataExtracter
{
diff --git a/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracterTest.java b/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracterTest.java
index a4ed6efaba..986c67a9d4 100644
--- a/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracterTest.java
+++ b/source/java/org/alfresco/repo/content/metadata/HtmlMetadataExtracterTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -19,7 +19,7 @@ package org.alfresco.repo.content.metadata;
import org.alfresco.repo.content.MimetypeMap;
/**
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public class HtmlMetadataExtracterTest extends AbstractMetadataExtracterTest
{
diff --git a/source/java/org/alfresco/repo/content/metadata/MailMetadataExtracter.java b/source/java/org/alfresco/repo/content/metadata/MailMetadataExtracter.java
new file mode 100644
index 0000000000..6f527ece14
--- /dev/null
+++ b/source/java/org/alfresco/repo/content/metadata/MailMetadataExtracter.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) 2005 Jesper Steen Møller
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.content.metadata;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import org.alfresco.service.cmr.repository.ContentIOException;
+import org.alfresco.service.cmr.repository.ContentReader;
+import org.alfresco.service.namespace.NamespaceService;
+import org.alfresco.service.namespace.QName;
+import org.apache.poi.poifs.eventfilesystem.POIFSReader;
+import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
+import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
+import org.apache.poi.poifs.filesystem.DocumentInputStream;
+
+/**
+ * Outlook format email meta-data extractor
+ *
+ * @author Kevin Roast
+ */
+public class MailMetadataExtracter extends AbstractMetadataExtracter
+{
+ public static String[] SUPPORTED_MIMETYPES = new String[] {
+ "message/rfc822"};
+
+ private static final String SUBSTG_MESSAGEBODY = "__substg1.0_1000001E";
+ private static final String SUBSTG_RECIPIENTEMAIL = "__substg1.0_39FE001E";
+ private static final String SUBSTG_RECEIVEDEMAIL = "__substg1.0_0076001E";
+ private static final String SUBSTG_SENDEREMAIL = "__substg1.0_0C1F001E";
+ private static final String SUBSTG_DATE = "__substg1.0_00470102";
+
+ private static final QName ASPECT_MAILED = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "emailed");
+ private static final QName PROP_SENTDATE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "sentdate");
+ private static final QName PROP_ORIGINATOR = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "originator");
+ private static final QName PROP_ADDRESSEE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "addressee");
+ private static final QName PROP_ADDRESSEES = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "addressees");
+
+ // the CC: email addresses
+ private ThreadLocal<List<String>> receipientEmails = new ThreadLocal<List<String>>();
+
+ public MailMetadataExtracter()
+ {
+ super(new HashSet<String>(Arrays.asList(SUPPORTED_MIMETYPES)), 1.0, 1000);
+ }
+
+ public void extractInternal(ContentReader reader, final Map<QName, Serializable> destination) throws Throwable
+ {
+ POIFSReaderListener readerListener = new POIFSReaderListener()
+ {
+ public void processPOIFSReaderEvent(final POIFSReaderEvent event)
+ {
+ try
+ {
+ String name = event.getName();
+
+ if (name.equals(SUBSTG_RECIPIENTEMAIL)) // a recipient email address
+ {
+ String emailAddress = readPlainTextStream(event.getStream());
+ receipientEmails.get().add(convertExchangeAddress(emailAddress));
+ }
+ else if (name.equals(SUBSTG_RECEIVEDEMAIL)) // receiver email address
+ {
+ String emailAddress = readPlainTextStream(event.getStream());
+ destination.put(PROP_ADDRESSEE, convertExchangeAddress(emailAddress));
+ }
+ else if (name.equals(SUBSTG_SENDEREMAIL)) // sender email - NOTE either email OR full Exchange data e.g. : /O=HOSTEDSERVICE2/OU=FIRST ADMINISTRATIVE GROUP/CN=RECIPIENTS/CN=MIKE.FARMAN@BEN
+ {
+ String emailAddress = readPlainTextStream(event.getStream());
+ destination.put(PROP_ORIGINATOR, convertExchangeAddress(emailAddress));
+ }
+ else if (name.equals(SUBSTG_DATE))
+ {
+ // the date is not really plain text - but it's easier to parse as such
+ String date = readPlainTextStream(event.getStream());
+ int valueIndex = date.indexOf("l=");
+ if (valueIndex != -1)
+ {
+ int dateIndex = date.indexOf('-', valueIndex);
+ if (dateIndex != -1)
+ {
+ dateIndex++;
+ String strYear = date.substring(dateIndex, dateIndex + 2);
+ int year = Integer.parseInt(strYear) + (2000 - 1900);
+ String strMonth = date.substring(dateIndex + 2, dateIndex + 4);
+ int month = Integer.parseInt(strMonth) - 1;
+ String strDay = date.substring(dateIndex + 4, dateIndex + 6);
+ int day = Integer.parseInt(strDay);
+ String strHour = date.substring(dateIndex + 6, dateIndex + 8);
+ int hour = Integer.parseInt(strHour);
+ String strMinute = date.substring(dateIndex + 10, dateIndex + 12);
+ int minute = Integer.parseInt(strMinute);
+ destination.put(PROP_SENTDATE, new Date(year, month, day, hour, minute));
+ }
+ }
+ }
+ }
+ catch (Exception ex)
+ {
+ throw new ContentIOException("Property set stream: " + event.getPath() + event.getName(), ex);
+ }
+ }
+ };
+
+ InputStream is = null;
+ try
+ {
+ this.receipientEmails.set(new ArrayList<String>());
+
+ is = reader.getContentInputStream();
+ POIFSReader poiFSReader = new POIFSReader();
+ poiFSReader.registerListener(readerListener);
+
+ try
+ {
+ poiFSReader.read(is);
+ }
+ catch (IOException err)
+ {
+ // probably not an Outlook format MSG - ignore for now
+ logger.warn("Unable to extract meta-data from message: " + err.getMessage());
+ }
+
+ // store multi-value extracted property
+ if (receipientEmails.get().size() != 0)
+ {
+ destination.put(PROP_ADDRESSEES, (Serializable)receipientEmails.get());
+ }
+ }
+ finally
+ {
+ if (is != null)
+ {
+ try { is.close(); } catch (IOException e) {}
+ }
+ }
+ }
+
+ private static String readPlainTextStream(DocumentInputStream stream)
+ throws IOException
+ {
+ byte[] data = new byte[stream.available()];
+ int read = stream.read(data);
+ return new String(data);
+ }
+
+ private static String convertExchangeAddress(String email)
+ {
+ if (email.lastIndexOf("/CN=") == -1)
+ {
+ return email;
+ }
+ else
+ {
+ // found a full Exchange format To header
+ return email.substring(email.lastIndexOf("/CN=") + 4);
+ }
+ }
+}
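convertExchangeAddress() reduces a full Exchange distinguished name to its final CN component and leaves plain addresses untouched. The same logic with invented sample inputs:

// Behaviour of convertExchangeAddress(), reproduced standalone with sample inputs.
public class ExchangeAddressDemo
{
    static String convertExchangeAddress(String email)
    {
        int cn = email.lastIndexOf("/CN=");
        return cn == -1 ? email : email.substring(cn + 4);
    }

    public static void main(String[] args)
    {
        // Full Exchange form -> trailing CN value
        System.out.println(convertExchangeAddress(
                "/O=EXAMPLE/OU=FIRST ADMINISTRATIVE GROUP/CN=RECIPIENTS/CN=JANE.DOE"));   // JANE.DOE

        // Plain SMTP address passes through unchanged
        System.out.println(convertExchangeAddress("jane.doe@example.com"));
    }
}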
diff --git a/source/java/org/alfresco/repo/content/metadata/MetadataExtracter.java b/source/java/org/alfresco/repo/content/metadata/MetadataExtracter.java
index 50b61930da..1cc07c5dc7 100644
--- a/source/java/org/alfresco/repo/content/metadata/MetadataExtracter.java
+++ b/source/java/org/alfresco/repo/content/metadata/MetadataExtracter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -25,7 +25,7 @@ import org.alfresco.service.namespace.QName;
/**
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public interface MetadataExtracter
{
diff --git a/source/java/org/alfresco/repo/content/metadata/MetadataExtracterRegistry.java b/source/java/org/alfresco/repo/content/metadata/MetadataExtracterRegistry.java
index 0a3fd4fe1a..8dd87fb63b 100644
--- a/source/java/org/alfresco/repo/content/metadata/MetadataExtracterRegistry.java
+++ b/source/java/org/alfresco/repo/content/metadata/MetadataExtracterRegistry.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -36,7 +36,7 @@ import org.apache.commons.logging.LogFactory;
* The extracters themselves know how well they are able to extract metadata.
*
* @see org.alfresco.repo.content.metadata.MetadataExtracter
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public class MetadataExtracterRegistry
{
diff --git a/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracter.java b/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracter.java
index 250f9bdfc2..179be80aa7 100644
--- a/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracter.java
+++ b/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -28,7 +28,6 @@ import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.namespace.QName;
-import org.apache.poi.hpsf.DocumentSummaryInformation;
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.hpsf.SummaryInformation;
@@ -37,15 +36,16 @@ import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
/**
+ * Office file format Metadata Extracter
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public class OfficeMetadataExtracter extends AbstractMetadataExtracter
{
public static String[] SUPPORTED_MIMETYPES = new String[] {
MimetypeMap.MIMETYPE_WORD,
MimetypeMap.MIMETYPE_EXCEL,
- MimetypeMap.MIMETYPE_PPT };
+ MimetypeMap.MIMETYPE_PPT};
public OfficeMetadataExtracter()
{
@@ -64,6 +64,7 @@ public class OfficeMetadataExtracter extends AbstractMetadataExtracter
if (ps instanceof SummaryInformation)
{
SummaryInformation si = (SummaryInformation) ps;
+
// Titled aspect
trimPut(ContentModel.PROP_TITLE, si.getTitle(), destination);
trimPut(ContentModel.PROP_DESCRIPTION, si.getSubject(), destination);
@@ -73,16 +74,6 @@ public class OfficeMetadataExtracter extends AbstractMetadataExtracter
trimPut(ContentModel.PROP_MODIFIED, si.getLastSaveDateTime(), destination);
trimPut(ContentModel.PROP_AUTHOR, si.getAuthor(), destination);
}
- else if (ps instanceof DocumentSummaryInformation)
- {
-// DocumentSummaryInformation dsi = (DocumentSummaryInformation) ps;
-
- // These are not really interesting to any aspect:
- // trimPut(ContentModel.PROP_xxx, dsi.getCompany(),
- // destination);
- // trimPut(ContentModel.PROP_yyy, dsi.getManager(),
- // destination);
- }
}
catch (Exception ex)
{
@@ -90,6 +81,7 @@ public class OfficeMetadataExtracter extends AbstractMetadataExtracter
}
}
};
+
InputStream is = null;
try
{
diff --git a/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracterTest.java b/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracterTest.java
index 2630ee4ab1..6249415fdd 100644
--- a/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracterTest.java
+++ b/source/java/org/alfresco/repo/content/metadata/OfficeMetadataExtracterTest.java
@@ -4,7 +4,7 @@ package org.alfresco.repo.content.metadata;
/**
* @see org.alfresco.repo.content.transform.OfficeMetadataExtracter
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public class OfficeMetadataExtracterTest extends AbstractMetadataExtracterTest
{
diff --git a/source/java/org/alfresco/repo/content/metadata/UnoMetadataExtracter.java b/source/java/org/alfresco/repo/content/metadata/OpenOfficeMetadataExtracter.java
similarity index 83%
rename from source/java/org/alfresco/repo/content/metadata/UnoMetadataExtracter.java
rename to source/java/org/alfresco/repo/content/metadata/OpenOfficeMetadataExtracter.java
index 465d10e759..389b5a46bf 100644
--- a/source/java/org/alfresco/repo/content/metadata/UnoMetadataExtracter.java
+++ b/source/java/org/alfresco/repo/content/metadata/OpenOfficeMetadataExtracter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -24,12 +24,13 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
-import net.sf.joott.uno.UnoConnection;
+import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.namespace.QName;
+import org.alfresco.util.PropertyCheck;
import org.alfresco.util.TempFileProvider;
import com.sun.star.beans.PropertyValue;
@@ -41,9 +42,9 @@ import com.sun.star.ucb.XFileIdentifierConverter;
import com.sun.star.uno.UnoRuntime;
/**
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
-public class UnoMetadataExtracter extends AbstractMetadataExtracter
+public class OpenOfficeMetadataExtracter extends AbstractMetadataExtracter
{
public static String[] SUPPORTED_MIMETYPES = new String[] {
MimetypeMap.MIMETYPE_STAROFFICE5_WRITER,
@@ -55,31 +56,26 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter
// quality since they involve conversion.
};
- private String contentUrl;
- private MyUnoConnection connection;
+ private OpenOfficeConnection connection;
private boolean isConnected;
- public UnoMetadataExtracter()
+ public OpenOfficeMetadataExtracter()
{
super(new HashSet<String>(Arrays.asList(SUPPORTED_MIMETYPES)), 1.00, 10000);
- this.contentUrl = UnoConnection.DEFAULT_CONNECTION_STRING;
}
- /**
- *
- * @param contentUrl the URL to connect to
- */
- public void setContentUrl(String contentUrl)
+ public void setConnection(OpenOfficeConnection connection)
{
- this.contentUrl = contentUrl;
+ this.connection = connection;
}
-
+
/**
* Initialises the bean by establishing an UNO connection
*/
public synchronized void init()
{
- connection = new MyUnoConnection(contentUrl);
+ PropertyCheck.mandatory("OpenOfficeMetadataExtracter", "connection", connection);
+
// attempt to make a connection
try
{
@@ -109,7 +105,7 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter
// create temporary files to convert from and to
File tempFromFile = TempFileProvider.createTempFile(
- "UnoContentTransformer_", "."
+ "OpenOfficeMetadataExtracter-", "."
+ getMimetypeService().getExtension(sourceMimetype));
// download the content from the source reader
reader.getContent(tempFromFile);
@@ -158,9 +154,9 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter
}
}
- public String toUrl(File file, MyUnoConnection connection) throws ConnectException
+ public String toUrl(File file, OpenOfficeConnection connection) throws ConnectException
{
- Object fcp = connection.getFileContentService();
+ Object fcp = connection.getFileContentProvider();
XFileIdentifierConverter fic = (XFileIdentifierConverter) UnoRuntime.queryInterface(
XFileIdentifierConverter.class, fcp);
return fic.getFileURLFromSystemPath("", file.getAbsolutePath());
@@ -181,17 +177,4 @@ public class UnoMetadataExtracter extends AbstractMetadataExtracter
property.Value = value;
return property;
}
-
- static class MyUnoConnection extends UnoConnection
- {
- public MyUnoConnection(String url)
- {
- super(url);
- }
-
- public Object getFileContentService() throws ConnectException
- {
- return getService("com.sun.star.ucb.FileContentProvider");
- }
- }
}
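For orientation, a minimal wiring sketch for the renamed extracter, mirroring the test diff that follows. The helper class is hypothetical and not part of this change set; it assumes a locally running OpenOffice process reachable through the default SocketOpenOfficeConnection settings and takes the MimetypeService as a parameter instead of looking it up.

package org.alfresco.repo.content.metadata;

import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;

import org.alfresco.service.cmr.repository.MimetypeService;

// Hypothetical helper, not part of the change set.
public class OpenOfficeMetadataExtracterWiringSketch
{
    public static OpenOfficeMetadataExtracter create(MimetypeService mimetypeService)
    {
        // connects to a locally running soffice process (default host/port assumed)
        OpenOfficeConnection connection = new SocketOpenOfficeConnection();

        OpenOfficeMetadataExtracter extracter = new OpenOfficeMetadataExtracter();
        extracter.setMimetypeService(mimetypeService);
        extracter.setConnection(connection);
        // checks the mandatory 'connection' property and attempts to connect
        extracter.init();
        return extracter;
    }
}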
diff --git a/source/java/org/alfresco/repo/content/metadata/UnoMetadataExtracterTest.java b/source/java/org/alfresco/repo/content/metadata/OpenOfficeMetadataExtracterTest.java
similarity index 67%
rename from source/java/org/alfresco/repo/content/metadata/UnoMetadataExtracterTest.java
rename to source/java/org/alfresco/repo/content/metadata/OpenOfficeMetadataExtracterTest.java
index f43013c2a3..9648c52bba 100644
--- a/source/java/org/alfresco/repo/content/metadata/UnoMetadataExtracterTest.java
+++ b/source/java/org/alfresco/repo/content/metadata/OpenOfficeMetadataExtracterTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -16,20 +16,27 @@
*/
package org.alfresco.repo.content.metadata;
+import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
+import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;
+
/**
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
-public class UnoMetadataExtracterTest extends AbstractMetadataExtracterTest
+public class OpenOfficeMetadataExtracterTest extends AbstractMetadataExtracterTest
{
- private UnoMetadataExtracter extracter;
+ private OpenOfficeMetadataExtracter extracter;
@Override
public void setUp() throws Exception
{
super.setUp();
- extracter = new UnoMetadataExtracter();
+
+ OpenOfficeConnection connection = new SocketOpenOfficeConnection();
+
+ extracter = new OpenOfficeMetadataExtracter();
extracter.setMimetypeService(mimetypeMap);
+ extracter.setConnection(connection);
extracter.init();
}
@@ -48,7 +55,7 @@ public class UnoMetadataExtracterTest extends AbstractMetadataExtracterTest
return;
}
- for (String mimetype : UnoMetadataExtracter.SUPPORTED_MIMETYPES)
+ for (String mimetype : OpenOfficeMetadataExtracter.SUPPORTED_MIMETYPES)
{
double reliability = extracter.getReliability(mimetype);
assertTrue("Expected above zero reliability", reliability > 0.0);
@@ -61,7 +68,7 @@ public class UnoMetadataExtracterTest extends AbstractMetadataExtracterTest
{
return;
}
- for (String mimetype : UnoMetadataExtracter.SUPPORTED_MIMETYPES)
+ for (String mimetype : OpenOfficeMetadataExtracter.SUPPORTED_MIMETYPES)
{
testExtractFromMimetype(mimetype);
}
diff --git a/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracter.java b/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracter.java
index e335c6cf83..5f0d796058 100644
--- a/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracter.java
+++ b/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005 Jesper Steen Møller
+ * Copyright (C) 2005 Jesper Steen Møller
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
@@ -31,7 +31,7 @@ import org.pdfbox.pdmodel.PDDocumentInformation;
/**
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public class PdfBoxMetadataExtracter extends AbstractMetadataExtracter
{
diff --git a/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracterTest.java b/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracterTest.java
index 6b82efa45e..70049a7e92 100644
--- a/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracterTest.java
+++ b/source/java/org/alfresco/repo/content/metadata/PdfBoxMetadataExtracterTest.java
@@ -5,7 +5,7 @@ import org.alfresco.repo.content.MimetypeMap;
/**
* @see org.alfresco.repo.content.metadata.PdfBoxMetadataExtracter
*
- * @author Jesper Steen Møller
+ * @author Jesper Steen Møller
*/
public class PdfBoxMetadataExtracterTest extends AbstractMetadataExtracterTest
{
diff --git a/source/java/org/alfresco/repo/content/transform/AbstractContentTransformer.java b/source/java/org/alfresco/repo/content/transform/AbstractContentTransformer.java
index c7e8a6b106..ee82eac135 100644
--- a/source/java/org/alfresco/repo/content/transform/AbstractContentTransformer.java
+++ b/source/java/org/alfresco/repo/content/transform/AbstractContentTransformer.java
@@ -123,12 +123,8 @@ public abstract class AbstractContentTransformer implements ContentTransformer
{
if (registry == null)
{
- if (registry == null)
- {
- logger.warn("Property 'registry' has not been set. Ignoring auto-registration: \n" +
- " transformer: " + this);
- return;
- }
+ logger.warn("Property 'registry' has not been set. Ignoring auto-registration: \n" +
+ " transformer: " + this);
return;
}
// first register any explicit transformations
diff --git a/source/java/org/alfresco/repo/content/transform/ContentTransformerRegistry.java b/source/java/org/alfresco/repo/content/transform/ContentTransformerRegistry.java
index 5b9ccca560..242499ffc4 100644
--- a/source/java/org/alfresco/repo/content/transform/ContentTransformerRegistry.java
+++ b/source/java/org/alfresco/repo/content/transform/ContentTransformerRegistry.java
@@ -50,7 +50,6 @@ public class ContentTransformerRegistry
private MimetypeMap mimetypeMap;
/** Cache of previously used transactions */
private Map<TransformationKey, List<ContentTransformer>> transformationCache;
- private short accessCount;
/** Controls read access to the transformation cache */
private Lock transformationCacheReadLock;
/** controls write access to the transformation cache */
@@ -67,7 +66,6 @@ public class ContentTransformerRegistry
this.transformers = new ArrayList(10);
transformationCache = new HashMap<TransformationKey, List<ContentTransformer>>(17);
- accessCount = 0;
// create lock objects for access to the cache
ReadWriteLock transformationCacheLock = new ReentrantReadWriteLock();
transformationCacheReadLock = transformationCacheLock.readLock();
@@ -120,7 +118,6 @@ public class ContentTransformerRegistry
try
{
transformationCache.clear();
- accessCount = 0;
}
finally
{
@@ -243,7 +240,6 @@ public class ContentTransformerRegistry
private List<ContentTransformer> findDirectTransformers(String sourceMimetype, String targetMimetype)
{
double maxReliability = 0.0;
- long leastTime = 100000L; // 100 seconds - longer than anyone would think of waiting
List<ContentTransformer> bestTransformers = new ArrayList<ContentTransformer>(2);
// loop through transformers
for (ContentTransformer transformer : this.transformers)
@@ -289,6 +285,7 @@ public class ContentTransformerRegistry
/**
* Recursive method to build up a list of content transformers
*/
+ @SuppressWarnings("unused")
private void buildTransformer(List<ContentTransformer> transformers,
double reliability,
List touchedMimetypes,
diff --git a/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformer.java b/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformer.java
new file mode 100644
index 0000000000..007144d77c
--- /dev/null
+++ b/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformer.java
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.content.transform;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ConnectException;
+import java.util.Map;
+
+import net.sf.jooreports.converter.DocumentFamily;
+import net.sf.jooreports.converter.DocumentFormat;
+import net.sf.jooreports.converter.DocumentFormatRegistry;
+import net.sf.jooreports.converter.XmlDocumentFormatRegistry;
+import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
+import net.sf.jooreports.openoffice.connection.OpenOfficeException;
+import net.sf.jooreports.openoffice.converter.OpenOfficeDocumentConverter;
+
+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.repo.content.MimetypeMap;
+import org.alfresco.service.cmr.repository.ContentIOException;
+import org.alfresco.service.cmr.repository.ContentReader;
+import org.alfresco.service.cmr.repository.ContentWriter;
+import org.alfresco.service.cmr.repository.MimetypeService;
+import org.alfresco.util.PropertyCheck;
+import org.alfresco.util.TempFileProvider;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.core.io.DefaultResourceLoader;
+
+/**
+ * Makes use of the {@link http://sourceforge.net/projects/joott/ JOOConverter} library to
+ * perform OpenOffice-driven conversions.
+ *
+ * @author Derek Hulley
+ */
+public class OpenOfficeContentTransformer extends AbstractContentTransformer
+{
+ private static Log logger = LogFactory.getLog(OpenOfficeContentTransformer.class);
+
+ private OpenOfficeConnection connection;
+ private boolean connected;
+ private OpenOfficeDocumentConverter converter;
+ private String documentFormatsConfiguration;
+ private DocumentFormatRegistry formatRegistry;
+
+ public OpenOfficeContentTransformer()
+ {
+ this.connected = false;
+ }
+
+ public void setConnection(OpenOfficeConnection connection)
+ {
+ this.connection = connection;
+ }
+
+ /**
+ * Set a non-default location from which to load the document format mappings.
+ *
+ * @param path a resource location supporting the file: or classpath: prefixes
+ */
+ public void setDocumentFormatsConfiguration(String path)
+ {
+ this.documentFormatsConfiguration = path;
+ }
+
+ public boolean isConnected()
+ {
+ return connected;
+ }
+
+ private synchronized void connect()
+ {
+ try
+ {
+ connection.connect();
+ connected = true;
+ }
+ catch (ConnectException e)
+ {
+ logger.warn(e.getMessage());
+ connected = false;
+ }
+ }
+
+ @Override
+ public void register()
+ {
+ PropertyCheck.mandatory("OpenOfficeContentTransformer", "connection", connection);
+
+ // attempt to establish a connection
+ connect();
+
+ // set up the converter
+ converter = new OpenOfficeDocumentConverter(connection);
+
+ // load the document conversion configuration
+ if (documentFormatsConfiguration != null)
+ {
+ DefaultResourceLoader resourceLoader = new DefaultResourceLoader();
+ try
+ {
+ InputStream is = resourceLoader.getResource(documentFormatsConfiguration).getInputStream();
+ formatRegistry = new XmlDocumentFormatRegistry(is);
+ }
+ catch (IOException e)
+ {
+ throw new AlfrescoRuntimeException(
+ "Unable to load document formats configuration file: " + documentFormatsConfiguration);
+ }
+ }
+ else
+ {
+ formatRegistry = new XmlDocumentFormatRegistry();
+ }
+
+ if (connected)
+ {
+ // register
+ super.register();
+ }
+ }
+
+ /**
+ * @see DocumentFormatRegistry
+ */
+ public double getReliability(String sourceMimetype, String targetMimetype)
+ {
+ if (!connected)
+ {
+ return 0.0;
+ }
+
+ // there are some conversions that fail, despite the converter believing them possible
+ if (targetMimetype.equals(MimetypeMap.MIMETYPE_XHTML))
+ {
+ return 0.0;
+ }
+ else if (targetMimetype.equals(MimetypeMap.MIMETYPE_WORDPERFECT))
+ {
+ return 0.0;
+ }
+
+ MimetypeService mimetypeService = getMimetypeService();
+ String sourceExtension = mimetypeService.getExtension(sourceMimetype);
+ String targetExtension = mimetypeService.getExtension(targetMimetype);
+ // query the registry for the source format
+ DocumentFormat sourceFormat = formatRegistry.getFormatByFileExtension(sourceExtension);
+ if (sourceFormat == null)
+ {
+ // no document format
+ return 0.0;
+ }
+ // query the registry for the target format
+ DocumentFormat targetFormat = formatRegistry.getFormatByFileExtension(targetExtension);
+ if (targetFormat == null)
+ {
+ // no document format
+ return 0.0;
+ }
+
+ // get the family of the target document
+ DocumentFamily sourceFamily = sourceFormat.getFamily();
+ // does the format support the conversion
+ if (!targetFormat.isExportableFrom(sourceFamily))
+ {
+ // unable to export from source family of documents to the target format
+ return 0.0;
+ }
+ else
+ {
+ return 1.0;
+ }
+ }
+
+ protected void transformInternal(
+ ContentReader reader,
+ ContentWriter writer,
+ Map<String, Object> options) throws Exception
+ {
+ String sourceMimetype = getMimetype(reader);
+ String targetMimetype = getMimetype(writer);
+
+ MimetypeService mimetypeService = getMimetypeService();
+ String sourceExtension = mimetypeService.getExtension(sourceMimetype);
+ String targetExtension = mimetypeService.getExtension(targetMimetype);
+ // query the registry for the source format
+ DocumentFormat sourceFormat = formatRegistry.getFormatByFileExtension(sourceExtension);
+ if (sourceFormat == null)
+ {
+ // source format is not recognised
+ throw new ContentIOException("No OpenOffice document format for source extension: " + sourceExtension);
+ }
+ // query the registry for the target format
+ DocumentFormat targetFormat = formatRegistry.getFormatByFileExtension(targetExtension);
+ if (targetFormat == null)
+ {
+ // target format is not recognised
+ throw new ContentIOException("No OpenOffice document format for target extension: " + sourceExtension);
+ }
+ // get the family of the target document
+ DocumentFamily sourceFamily = sourceFormat.getFamily();
+ // does the format support the conversion
+ if (!targetFormat.isExportableFrom(sourceFamily))
+ {
+ throw new ContentIOException(
+ "OpenOffice conversion not supported: \n" +
+ " reader: " + reader + "\n" +
+ " writer: " + writer);
+ }
+
+ // create temporary files to convert from and to
+ File tempFromFile = TempFileProvider.createTempFile(
+ "OpenOfficeContentTransformer-source-",
+ "." + sourceExtension);
+ File tempToFile = TempFileProvider.createTempFile(
+ "OpenOfficeContentTransformer-target-",
+ "." + targetExtension);
+ // download the content from the source reader
+ reader.getContent(tempFromFile);
+
+ try
+ {
+ converter.convert(tempFromFile, sourceFormat, tempToFile, targetFormat);
+ // conversion success
+ }
+ catch (OpenOfficeException e)
+ {
+ throw new ContentIOException("OpenOffice server conversion failed: \n" +
+ " reader: " + reader + "\n" +
+ " writer: " + writer + "\n" +
+ " from file: " + tempFromFile + "\n" +
+ " to file: " + tempToFile,
+ e);
+ }
+
+ // upload the temp output to the writer given us
+ writer.putContent(tempToFile);
+ }
+}
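As a usage illustration only (the bean is normally wired through Spring configuration), here is a standalone setup sketch mirroring the test that follows in this patch. The helper class name and the source of the MimetypeService are assumptions; all setter and lifecycle calls are taken from the code above.

package org.alfresco.repo.content.transform;

import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
import net.sf.jooreports.openoffice.connection.SocketOpenOfficeConnection;

import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.service.cmr.repository.MimetypeService;

// Hypothetical helper, not part of the change set.
public class OpenOfficeContentTransformerWiringSketch
{
    public static OpenOfficeContentTransformer create(MimetypeService mimetypeService)
    {
        // default host/port of a locally running soffice instance assumed
        OpenOfficeConnection connection = new SocketOpenOfficeConnection();

        OpenOfficeContentTransformer transformer = new OpenOfficeContentTransformer();
        transformer.setMimetypeService(mimetypeService);
        transformer.setConnection(connection);
        transformer.setDocumentFormatsConfiguration(
                "classpath:alfresco/mimetype/openoffice-document-formats.xml");
        // connects, loads the format registry and auto-registers only if connected
        transformer.register();

        // reliability is 0.0 whenever the OpenOffice process is unreachable
        double reliability = transformer.getReliability(
                MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_PDF);
        System.out.println("Word to PDF reliability: " + reliability);
        return transformer;
    }
}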
diff --git a/source/java/org/alfresco/repo/content/transform/UnoContentTransformerTest.java b/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformerTest.java
similarity index 70%
rename from source/java/org/alfresco/repo/content/transform/UnoContentTransformerTest.java
rename to source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformerTest.java
index 76133b224a..12d016db23 100644
--- a/source/java/org/alfresco/repo/content/transform/UnoContentTransformerTest.java
+++ b/source/java/org/alfresco/repo/content/transform/OpenOfficeContentTransformerTest.java
@@ -16,24 +16,30 @@
*/
package org.alfresco.repo.content.transform;
+import net.sf.jooreports.openoffice.connection.OpenOfficeConnection;
+
import org.alfresco.repo.content.MimetypeMap;
/**
- * @see org.alfresco.repo.content.transform.UnoContentTransformer
+ * @see org.alfresco.repo.content.transform.OpenOfficeContentTransformer
*
* @author Derek Hulley
*/
-public class UnoContentTransformerTest extends AbstractContentTransformerTest
+public class OpenOfficeContentTransformerTest extends AbstractContentTransformerTest
{
private static String MIMETYPE_RUBBISH = "text/rubbish";
- private UnoContentTransformer transformer;
+ private OpenOfficeContentTransformer transformer;
public void onSetUpInTransaction() throws Exception
{
- transformer = new UnoContentTransformer();
+ OpenOfficeConnection connection = (OpenOfficeConnection) applicationContext.getBean("openOfficeConnection");
+
+ transformer = new OpenOfficeContentTransformer();
transformer.setMimetypeService(mimetypeMap);
- transformer.init();
+ transformer.setConnection(connection);
+ transformer.setDocumentFormatsConfiguration("classpath:alfresco/mimetype/openoffice-document-formats.xml");
+ transformer.register();
}
/**
@@ -62,6 +68,8 @@ public class UnoContentTransformerTest extends AbstractContentTransformerTest
assertEquals("Mimetype should not be supported", 0.0, reliability);
reliability = transformer.getReliability(MimetypeMap.MIMETYPE_TEXT_PLAIN, MIMETYPE_RUBBISH);
assertEquals("Mimetype should not be supported", 0.0, reliability);
+ reliability = transformer.getReliability(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_XHTML);
+ assertEquals("Mimetype should not be supported", 0.0, reliability);
reliability = transformer.getReliability(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_WORD);
assertEquals("Mimetype should be supported", 1.0, reliability);
reliability = transformer.getReliability(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_TEXT_PLAIN);
diff --git a/source/java/org/alfresco/repo/content/transform/UnoContentTransformer.java b/source/java/org/alfresco/repo/content/transform/UnoContentTransformer.java
deleted file mode 100644
index 643336d718..0000000000
--- a/source/java/org/alfresco/repo/content/transform/UnoContentTransformer.java
+++ /dev/null
@@ -1,337 +0,0 @@
-/*
- * Copyright (C) 2005 Alfresco, Inc.
- *
- * Licensed under the Mozilla Public License version 1.1
- * with a permitted attribution clause. You may obtain a
- * copy of the License at
- *
- * http://www.alfresco.org/legal/license.txt
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific
- * language governing permissions and limitations under the
- * License.
- */
-package org.alfresco.repo.content.transform;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.HashMap;
-import java.util.Map;
-
-import net.sf.joott.uno.DocumentConverter;
-import net.sf.joott.uno.DocumentFormat;
-import net.sf.joott.uno.UnoConnection;
-
-import org.alfresco.repo.content.MimetypeMap;
-import org.alfresco.service.cmr.repository.ContentIOException;
-import org.alfresco.service.cmr.repository.ContentReader;
-import org.alfresco.service.cmr.repository.ContentWriter;
-import org.alfresco.util.TempFileProvider;
-
-/**
- * Makes use of the OpenOffice Uno interfaces to convert the content.
- *
- * The conversions are slow but reliable. Not all possible combinations of transformations
- * have been enabled because they don't necessarily work and need to be specifically tested before
- * being made available generally. As the conversion process is mostly automated, the introduction
- * of faulty transformations can lead to unnecessary bugs. Feel free to experiment and, assuming
- * that the unit test works, report any interesting conversions that can be enabled.
- *
- * @author Derek Hulley
- */
-public class UnoContentTransformer extends AbstractContentTransformer
-{
- /** map of DocumentFormat instances keyed by mimetype conversion */
- private static Map formatsByConversion;
-
- static
- {
- // Build the map of known Uno document formats and store by conversion key
- formatsByConversion = new HashMap(17);
-
- // Open Office 2.0 / Open Document
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_TEXT, MimetypeMap.MIMETYPE_TEXT_PLAIN),
- new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_TEXT, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_SPREADSHEET, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_CALC, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENDOCUMENT_PRESENTATION, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- // Open Office
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_WRITER, MimetypeMap.MIMETYPE_TEXT_PLAIN),
- new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_WRITER, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_CALC, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_DRAW, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_OPENOFFICE1_IMPRESS, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
- // Star Office 5.x
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_DRAW, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_CALC, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_CALC, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_CHART, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_IMPRESS, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_IMPRESS_PACKED, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_WRITER, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_STAROFFICE5_WRITER_GLOBAL, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- // MS Office
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_TEXT_PLAIN),
- new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_EXCEL, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_CALC, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_WORD, MimetypeMap.MIMETYPE_HTML),
- new DocumentFormatWrapper(DocumentFormat.HTML_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_PPT, MimetypeMap.MIMETYPE_FLASH),
- new DocumentFormatWrapper(DocumentFormat.FLASH_IMPRESS, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_PPT, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_IMPRESS, 1.0));
- // Other
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_HTML),
- new DocumentFormatWrapper(DocumentFormat.HTML_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_WORD),
- new DocumentFormatWrapper(DocumentFormat.TEXT, 1.0));
- formatsByConversion.put(
- new ContentTransformerRegistry.TransformationKey(MimetypeMap.MIMETYPE_HTML, MimetypeMap.MIMETYPE_PDF),
- new DocumentFormatWrapper(DocumentFormat.PDF_WRITER_WEB, 1.0));
-
- // there are many more formats available and therefore many more transformation combinations possible
-// DocumentFormat.FLASH_IMPRESS
-// DocumentFormat.HTML_CALC
-// DocumentFormat.HTML_WRITER
-// DocumentFormat.MS_EXCEL_97
-// DocumentFormat.MS_POWERPOINT_97
-// DocumentFormat.MS_WORD_97
-// DocumentFormat.PDF_CALC
-// DocumentFormat.PDF_IMPRESS
-// DocumentFormat.PDF_WRITER
-// DocumentFormat.PDF_WRITER_WEB
-// DocumentFormat.RTF
-// DocumentFormat.TEXT
-// DocumentFormat.TEXT_CALC
-// DocumentFormat.XML_CALC
-// DocumentFormat.XML_IMPRESS
-// DocumentFormat.XML_WRITER
-// DocumentFormat.XML_WRITER_WEB
- }
-
- private String connectionUrl = UnoConnection.DEFAULT_CONNECTION_STRING;
- private UnoConnection connection;
- private boolean isConnected;
-
- /**
- * Constructs the default transformer that will attempt to connect to the
- * Uno server using the default connect string.
- *
- * @see UnoConnection#DEFAULT_CONNECTION_STRING
- */
- public UnoContentTransformer()
- {
- isConnected = false;
- }
-
- /**
- * Override the default connection URL with a new one.
- *
- * @param connectionUrl the connection string
- *
- * @see UnoConnection#DEFAULT_CONNECTION_STRING
- */
- public void setConnectionUrl(String connectionUrl)
- {
- this.connectionUrl = connectionUrl;
- }
-
- /**
- * Connects to the OpenOffice server. If successful, then
- * {@link AbstractContentTransformer#register() auto-registers}.
- */
- public synchronized void init()
- {
- connection = new UnoConnection(connectionUrl);
- // attempt to make an connection
- try
- {
- connection.connect();
- isConnected = true;
- // register
- super.register();
- }
- catch (ConnectException e)
- {
- isConnected = false;
- }
- }
-
- /**
- * @return Returns true if a connection to the Uno server could be established
- */
- public boolean isConnected()
- {
- return isConnected;
- }
-
- /**
- * @param sourceMimetype
- * @param targetMimetype
- * @return Returns a document format wrapper that is valid for the given source and target mimetypes
- */
- private static DocumentFormatWrapper getDocumentFormatWrapper(String sourceMimetype, String targetMimetype)
- {
- // get the well-known document format for the specific conversion
- ContentTransformerRegistry.TransformationKey key =
- new ContentTransformerRegistry.TransformationKey(sourceMimetype, targetMimetype);
- DocumentFormatWrapper wrapper = UnoContentTransformer.formatsByConversion.get(key);
- return wrapper;
- }
-
- /**
- * Checks how reliable the conversion will be when performed by the Uno server.
- *
- * The connection for the Uno server is checked in order to have any chance of
- * being reliable.
- *
- * The conversions' reliabilities are set up statically based on prior tests that
- * included checking performance as well as accuracy.
- */
- public double getReliability(String sourceMimetype, String targetMimetype)
- {
- // check if a connection to the Uno server can be established
- if (!isConnected())
- {
- // no connection means that conversion is not possible
- return 0.0;
- }
- // check if the source and target mimetypes are supported
- DocumentFormatWrapper docFormatWrapper = getDocumentFormatWrapper(sourceMimetype, targetMimetype);
- if (docFormatWrapper == null)
- {
- return 0.0;
- }
- else
- {
- return docFormatWrapper.getReliability();
- }
- }
-
- public void transformInternal(ContentReader reader, ContentWriter writer, Map options)
- throws Exception
- {
- String sourceMimetype = getMimetype(reader);
- String targetMimetype = getMimetype(writer);
-
- // create temporary files to convert from and to
- File tempFromFile = TempFileProvider.createTempFile(
- "UnoContentTransformer",
- "." + getMimetypeService().getExtension(sourceMimetype));
- File tempToFile = TempFileProvider.createTempFile(
- "UnoContentTransformer",
- "." + getMimetypeService().getExtension(targetMimetype));
- // download the content from the source reader
- reader.getContent(tempFromFile);
-
- // get the document format that should be used
- DocumentFormatWrapper docFormatWrapper = getDocumentFormatWrapper(sourceMimetype, targetMimetype);
- try
- {
- docFormatWrapper.execute(tempFromFile, tempToFile, connection);
- // conversion success
- }
- catch (ConnectException e)
- {
- throw new ContentIOException("Connection to Uno server failed: \n" +
- " reader: " + reader + "\n" +
- " writer: " + writer,
- e);
- }
- catch (IOException e)
- {
- throw new ContentIOException("Uno server conversion failed: \n" +
- " reader: " + reader + "\n" +
- " writer: " + writer + "\n" +
- " from file: " + tempFromFile + "\n" +
- " to file: " + tempToFile,
- e);
- }
-
- // upload the temp output to the writer given us
- writer.putContent(tempToFile);
- }
-
- /**
- * Wraps a document format as well the reliability. The source and target mimetypes
- * are not kept, but will probably be closely associated with the reliability.
- */
- private static class DocumentFormatWrapper
- {
- /*
- * Source and target mimetypes not kept -> class is private as it doesn't keep
- * enough info to be used safely externally
- */
-
- private DocumentFormat documentFormat;
- private double reliability;
-
- public DocumentFormatWrapper(DocumentFormat documentFormat, double reliability)
- {
- this.documentFormat = documentFormat;
- this.reliability = reliability;
- }
-
- public double getReliability()
- {
- return reliability;
- }
-
- /**
- * Executs the transformation
- */
- public void execute(File fromFile, File toFile, UnoConnection connection) throws ConnectException, IOException
- {
- DocumentConverter converter = new DocumentConverter(connection);
- converter.convert(fromFile, toFile, documentFormat);
- }
- }
-}
diff --git a/source/java/org/alfresco/repo/copy/CopyServiceImpl.java b/source/java/org/alfresco/repo/copy/CopyServiceImpl.java
index 02b7581739..352c58cd26 100644
--- a/source/java/org/alfresco/repo/copy/CopyServiceImpl.java
+++ b/source/java/org/alfresco/repo/copy/CopyServiceImpl.java
@@ -47,6 +47,10 @@ import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.rule.RuleService;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchService;
+import org.alfresco.service.cmr.security.AccessPermission;
+import org.alfresco.service.cmr.security.AccessStatus;
+import org.alfresco.service.cmr.security.AuthenticationService;
+import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
@@ -73,6 +77,12 @@ public class CopyServiceImpl implements CopyService
/** Rule service */
private RuleService ruleService;
+
+ /** Permission service */
+ private PermissionService permissionService;
+
+ /** Authentication service */
+ private AuthenticationService authenticationService;
/** Policy delegates */
private ClassPolicyDelegate onCopyNodeDelegate;
@@ -128,6 +138,26 @@ public class CopyServiceImpl implements CopyService
this.ruleService = ruleService;
}
+ /**
+ * Set the permission service
+ *
+ * @param permissionService the permission service
+ */
+ public void setPermissionService(PermissionService permissionService)
+ {
+ this.permissionService = permissionService;
+ }
+
+ /**
+ * Sets the authentication service
+ *
+ * @param authenticationService the authentication service
+ */
+ public void setAuthenticationService(AuthenticationService authenticationService)
+ {
+ this.authenticationService = authenticationService;
+ }
+
/**
* Initialise method
*/
@@ -395,6 +425,9 @@ public class CopyServiceImpl implements CopyService
// Copy the associations
copyAssociations(destinationNodeRef, copyDetails, copyChildren, copiedChildren);
+
+ // Copy permissions
+ copyPermissions(sourceNodeRef, destinationNodeRef);
}
finally
{
@@ -404,6 +437,34 @@ public class CopyServiceImpl implements CopyService
return destinationNodeRef;
}
+ /**
+ * Copies the permissions of the source node reference onto the destination node reference
+ *
+ * @param sourceNodeRef the source node reference
+ * @param destinationNodeRef the destination node reference
+ */
+ private void copyPermissions(NodeRef sourceNodeRef, NodeRef destinationNodeRef)
+ {
+ // Get the permission details of the source node reference
+ Set<AccessPermission> permissions = this.permissionService.getAllSetPermissions(sourceNodeRef);
+ boolean includeInherited = this.permissionService.getInheritParentPermissions(sourceNodeRef);
+
+ AccessStatus writePermission = permissionService.hasPermission(destinationNodeRef, PermissionService.CHANGE_PERMISSIONS);
+ if (this.authenticationService.isCurrentUserTheSystemUser() || writePermission.equals(AccessStatus.ALLOWED))
+ {
+ // Set the permission values on the destination node
+ for (AccessPermission permission : permissions)
+ {
+ this.permissionService.setPermission(
+ destinationNodeRef,
+ permission.getAuthority(),
+ permission.getPermission(),
+ permission.getAccessStatus().equals(AccessStatus.ALLOWED));
+ }
+ this.permissionService.setInheritParentPermissions(destinationNodeRef, includeInherited);
+ }
+ }
+
/**
* Gets the copy details. This calls the appropriate policies that have been registered
* against the node and aspect types in order to pick-up any type specific copy behaviour.
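To make the new permission-copy guard concrete, a small sketch of the check that copyPermissions performs before writing ACL entries onto the destination node. The helper class is hypothetical; only PermissionService and AuthenticationService calls visible in the change above are used.

package org.alfresco.repo.copy;

import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.cmr.security.PermissionService;

// Hypothetical helper, not part of the change set.
public class CopyPermissionGuardSketch
{
    /**
     * @return true when the source ACL may be copied onto the destination: either the
     *         system user is running, or the current user holds CHANGE_PERMISSIONS there.
     */
    public static boolean mayCopyPermissions(
            PermissionService permissionService,
            AuthenticationService authenticationService,
            NodeRef destinationNodeRef)
    {
        AccessStatus changePermissions = permissionService.hasPermission(
                destinationNodeRef, PermissionService.CHANGE_PERMISSIONS);
        return authenticationService.isCurrentUserTheSystemUser()
                || AccessStatus.ALLOWED.equals(changePermissions);
    }
}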
diff --git a/source/java/org/alfresco/repo/jscript/Node.java b/source/java/org/alfresco/repo/jscript/Node.java
index 8fe97a8c2b..2d7940249c 100644
--- a/source/java/org/alfresco/repo/jscript/Node.java
+++ b/source/java/org/alfresco/repo/jscript/Node.java
@@ -21,6 +21,7 @@ import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.MessageFormat;
import java.util.ArrayList;
+import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -53,6 +54,8 @@ import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.mozilla.javascript.Context;
+import org.mozilla.javascript.NativeArray;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Wrapper;
import org.springframework.util.StringUtils;
@@ -847,56 +850,80 @@ public final class Node implements Serializable
for (String key : this.properties.keySet())
{
Serializable value = (Serializable)this.properties.get(key);
- if (value instanceof Node)
- {
- // convert back to NodeRef
- value = ((Node)value).getNodeRef();
- }
- else if (value instanceof ScriptContentData)
- {
- // convert back to ContentData
- value = ((ScriptContentData)value).contentData;
- }
- else if (value instanceof Wrapper)
- {
- // unwrap a Java object from a JavaScript wrapper
- value = (Serializable)((Wrapper)value).unwrap();
- }
- else if (value instanceof ScriptableObject)
- {
- // a scriptable object will probably indicate a multi-value property
- // set using a JavaScript Array object
- ScriptableObject values = (ScriptableObject)value;
-
- // convert JavaScript array of values to a List of Serializable objects
- Object[] propIds = values.getIds();
- List propValues = new ArrayList(propIds.length);
- for (int i=0; i propValues = new ArrayList(propIds.length);
+ for (int i=0; i R doWithWriteLock(LockWork lockWork)
+ {
+ return indexInfo.doWithWriteLock(lockWork);
+ }
}
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexBackupComponentTest.java b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexBackupComponentTest.java
index 615c5ad050..dc05e51163 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexBackupComponentTest.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexBackupComponentTest.java
@@ -18,6 +18,8 @@ package org.alfresco.repo.search.impl.lucene;
import java.io.File;
+import junit.framework.TestCase;
+
import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.cmr.repository.NodeService;
@@ -26,8 +28,6 @@ import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.TempFileProvider;
import org.springframework.context.ApplicationContext;
-import junit.framework.TestCase;
-
/**
* @see org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcherFactory.LuceneIndexBackupComponent
*
@@ -47,7 +47,7 @@ public class LuceneIndexBackupComponentTest extends TestCase
{
TransactionService transactionService = (TransactionService) ctx.getBean("transactionComponent");
NodeService nodeService = (NodeService) ctx.getBean("NodeService");
- LuceneIndexerAndSearcherFactory factory = (LuceneIndexerAndSearcherFactory) ctx.getBean("luceneIndexerAndSearcherFactory");
+ LuceneIndexerAndSearcher factory = (LuceneIndexerAndSearcher) ctx.getBean("luceneIndexerAndSearcherFactory");
this.authenticationComponent = (AuthenticationComponent)ctx.getBean("authenticationComponent");
this.authenticationComponent.setSystemUserAsCurrentUser();
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexer2.java b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexer2.java
index a7924ed311..2a0c9193c2 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexer2.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexer2.java
@@ -18,10 +18,9 @@ package org.alfresco.repo.search.impl.lucene;
import java.util.Set;
-import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerSPI;
-import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
+import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
@@ -44,4 +43,6 @@ public interface LuceneIndexer2 extends IndexerSPI
public void flushPending() throws LuceneIndexException;
public Set getDeletions();
public boolean getDeleteOnlyNodes();
+
+ public <R> R doWithWriteLock(IndexInfo.LockWork<R> lockWork);
}
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcher.java b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcher.java
index 14627c3092..75f94ab42e 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcher.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcher.java
@@ -24,4 +24,12 @@ public interface LuceneIndexerAndSearcher extends IndexerAndSearcher, LuceneConf
public int prepare() throws IndexerException;
public void commit() throws IndexerException;
public void rollback();
+
+
+ public interface WithAllWriteLocksWork<Result>
+ {
+ public Result doWork() throws Exception;
+ }
+
+ public <R> R doWithAllWriteLocks(WithAllWriteLocksWork<R> lockWork);
}
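A hedged sketch of how a caller such as the backup component would use the new callback; the wrapper class and the Runnable parameter are assumptions, and the generic signature follows the reconstruction above. The backup hunk at the end of this patch calls the method the same way.

package org.alfresco.repo.search.impl.lucene;

// Hypothetical helper, not part of the change set.
public class WriteLockCallbackSketch
{
    public static void runWhileIndexesAreLocked(LuceneIndexerAndSearcher factory, final Runnable work)
    {
        factory.doWithAllWriteLocks(new LuceneIndexerAndSearcher.WithAllWriteLocksWork<Object>()
        {
            public Object doWork() throws Exception
            {
                // every store's index writer is locked here, so index files can be copied consistently
                work.run();
                return null;
            }
        });
    }
}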
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcherFactory.java b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcherFactory.java
index a341edf844..a181147115 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcherFactory.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/LuceneIndexerAndSearcherFactory.java
@@ -33,6 +33,7 @@ import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.SearcherException;
+import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher.WithAllWriteLocksWork;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.transaction.LuceneIndexLock;
import org.alfresco.repo.search.transaction.SimpleTransaction;
@@ -61,11 +62,9 @@ import org.quartz.JobExecutionException;
/**
* This class is resource manager LuceneIndexers and LuceneSearchers.
*
- * It supports two phase commit inside XA transactions and outside transactions
- * it provides thread local transaction support.
+ * It supports two-phase commit inside XA transactions; outside transactions it provides thread-local transaction support.
*
- * TODO: Provide pluggable support for a transaction manager TODO: Integrate
- * with Spring transactions
+ * TODO: Provide pluggable support for a transaction manager TODO: Integrate with Spring transactions
*
* @author andyh
*
@@ -73,6 +72,8 @@ import org.quartz.JobExecutionException;
public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher, XAResource
{
+ private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory.class);
+
private DictionaryService dictionaryService;
private NamespaceService nameSpaceService;
@@ -90,9 +91,7 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
private String lockDirectory;
/**
- * A map of active global transactions . It contains all the indexers a
- * transaction has used, with at most one indexer for each store within a
- * transaction
+ * A map of active global transactions. It contains all the indexers a transaction has used, with at most one indexer for each store within a transaction
*/
private static Map<Xid, Map<StoreRef, LuceneIndexer>> activeIndexersInGlobalTx = new HashMap<Xid, Map<StoreRef, LuceneIndexer>>();
@@ -191,20 +190,18 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
/**
- * Set the maximum average transformation time allowed to a transformer in order to have
- * the transformation performed in the current transaction. The default is 20ms.
+ * Set the maximum average transformation time allowed to a transformer in order to have the transformation performed in the current transaction. The default is 20ms.
*
- * @param maxAtomicTransformationTime the maximum average time that a text transformation may
- * take in order to be performed atomically.
+ * @param maxAtomicTransformationTime
+ * the maximum average time that a text transformation may take in order to be performed atomically.
*/
public void setMaxAtomicTransformationTime(long maxAtomicTransformationTime)
{
this.maxAtomicTransformationTime = maxAtomicTransformationTime;
}
-
+
/**
- * Check if we are in a global transactoin according to the transaction
- * manager
+ * Check if we are in a global transaction according to the transaction manager
*
* @return
*/
@@ -240,8 +237,7 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
/**
- * Get an indexer for the store to use in the current transaction for this
- * thread of control.
+ * Get an indexer for the store to use in the current transaction for this thread of control.
*
* @param storeRef -
* the id of the store
@@ -454,7 +450,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
return;
}
- } finally
+ }
+ finally
{
activeIndexersInGlobalTx.remove(xid);
}
@@ -587,7 +584,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
{
indexer.rollback();
}
- } finally
+ }
+ finally
{
activeIndexersInGlobalTx.remove(xid);
}
@@ -677,7 +675,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
}
}
}
- } finally
+ }
+ finally
{
if (threadLocalIndexers.get() != null)
{
@@ -875,17 +874,17 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
{
IndexWriter.WRITE_LOCK_TIMEOUT = timeout;
}
-
+
public void setCommitLockTimeout(long timeout)
{
IndexWriter.COMMIT_LOCK_TIMEOUT = timeout;
}
-
+
public void setLockPollInterval(long time)
{
Lock.LOCK_POLL_INTERVAL = time;
}
-
+
public int getIndexerMaxFieldLength()
{
return indexerMaxFieldLength;
@@ -896,25 +895,24 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
this.indexerMaxFieldLength = indexerMaxFieldLength;
System.setProperty("org.apache.lucene.maxFieldLength", "" + indexerMaxFieldLength);
}
-
+
/**
- * This component is able to safely perform backups of the Lucene indexes while
- * the server is running.
+ * This component is able to safely perform backups of the Lucene indexes while the server is running.
*
- * It can be run directly by calling the {@link #backup() } method, but the convenience
- * {@link LuceneIndexBackupJob} can be used to call it as well.
+ * It can be run directly by calling the {@link #backup() } method, but the convenience {@link LuceneIndexBackupJob} can be used to call it as well.
*
* @author Derek Hulley
*/
public static class LuceneIndexBackupComponent
{
- private static Log logger = LogFactory.getLog(LuceneIndexerAndSearcherFactory.class);
-
private TransactionService transactionService;
- private LuceneIndexerAndSearcherFactory factory;
+
+ private LuceneIndexerAndSearcher factory;
+
private NodeService nodeService;
+
private String targetLocation;
-
+
public LuceneIndexBackupComponent()
{
}
@@ -932,9 +930,10 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
/**
* Set the Lucene index factory that will be used to control the index locks
*
- * @param factory the index factory
+ * @param factory
+ * the index factory
*/
- public void setFactory(LuceneIndexerAndSearcherFactory factory)
+ public void setFactory(LuceneIndexerAndSearcher factory)
{
this.factory = factory;
}
@@ -942,7 +941,8 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
/**
* Used to retrieve the stores
*
- * @param nodeService the node service
+ * @param nodeService
+ * the node service
*/
public void setNodeService(NodeService nodeService)
{
@@ -952,13 +952,14 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
/**
* Set the directory to which the backup will be copied
*
- * @param targetLocation the backup directory
+ * @param targetLocation
+ * the backup directory
*/
public void setTargetLocation(String targetLocation)
{
this.targetLocation = targetLocation;
}
-
+
/**
* Backup the Lucene indexes
*/
@@ -978,64 +979,48 @@ public class LuceneIndexerAndSearcherFactory implements LuceneIndexerAndSearcher
private void backupImpl()
{
// create the location to copy to
- File targetDir = new File(targetLocation);
+ final File targetDir = new File(targetLocation);
if (targetDir.exists() && !targetDir.isDirectory())
{
throw new AlfrescoRuntimeException("Target location is a file and not a directory: " + targetDir);
}
- File targetParentDir = targetDir.getParentFile();
+ final File targetParentDir = targetDir.getParentFile();
if (targetParentDir == null)
{
throw new AlfrescoRuntimeException("Target location may not be a root directory: " + targetDir);
}
- File tempDir = new File(targetParentDir, "indexbackup_temp");
+ final File tempDir = new File(targetParentDir, "indexbackup_temp");
- // get all the available stores
- List storeRefs = nodeService.getStores();
-
- // lock all the stores
- List lockedStores = new ArrayList(storeRefs.size());
- try
+ factory.doWithAllWriteLocks(new WithAllWriteLocksWork