- *    <li>that the keystore exists and, if not, creates one.
- *
- *    <li>that the encryption keys have not been changed. If so, the bootstrap will be halted.
- *
- *
- * @since 4.0
- *
- */
-public class EncryptionChecker extends AbstractLifecycleBean
-{
- private TransactionService transactionService;
- private KeyStoreChecker keyStoreChecker;
-
- public void setKeyStoreChecker(KeyStoreChecker keyStoreChecker)
- {
- this.keyStoreChecker = keyStoreChecker;
- }
-
- public void setTransactionService(TransactionService transactionService)
- {
- this.transactionService = transactionService;
- }
-
- @Override
- protected void onBootstrap(ApplicationEvent event)
- {
- RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
- txnHelper.setForceWritable(true); // Force write in case server is read-only
-
- txnHelper.doInTransaction(new RetryingTransactionCallback()
- {
- public Void execute() throws Throwable
- {
- try
- {
- keyStoreChecker.validateKeyStores();
- }
- catch(Throwable e)
- {
- // Just throw as a runtime exception
- throw new AlfrescoRuntimeException("Keystores are invalid", e);
- }
-
- return null;
- }
- });
- }
-
- @Override
- protected void onShutdown(ApplicationEvent event)
- {
-
- }
-}
+package org.alfresco.encryption;
+
+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.repo.transaction.RetryingTransactionHelper;
+import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
+import org.alfresco.service.transaction.TransactionService;
+import org.springframework.context.ApplicationEvent;
+import org.springframework.extensions.surf.util.AbstractLifecycleBean;
+
+/**
+ * The EncryptionChecker checks the state of the repository's encryption system.
+ * In particular it checks:
+ *
+ *
+ *    <li>that the keystore exists and, if not, creates one.
+ *
+ *    <li>that the encryption keys have not been changed. If so, the bootstrap will be halted.
+ *
+ *
+ * @since 4.0
+ *
+ */
+public class EncryptionChecker extends AbstractLifecycleBean
+{
+ private TransactionService transactionService;
+ private KeyStoreChecker keyStoreChecker;
+
+ public void setKeyStoreChecker(KeyStoreChecker keyStoreChecker)
+ {
+ this.keyStoreChecker = keyStoreChecker;
+ }
+
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ }
+
+ @Override
+ protected void onBootstrap(ApplicationEvent event)
+ {
+ RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
+ txnHelper.setForceWritable(true); // Force write in case server is read-only
+
+ txnHelper.doInTransaction(new RetryingTransactionCallback()
+ {
+ public Void execute() throws Throwable
+ {
+ try
+ {
+ keyStoreChecker.validateKeyStores();
+ }
+ catch(Throwable e)
+ {
+ // Just throw as a runtime exception
+ throw new AlfrescoRuntimeException("Keystores are invalid", e);
+ }
+
+ return null;
+ }
+ });
+ }
+
+ @Override
+ protected void onShutdown(ApplicationEvent event)
+ {
+
+ }
+}
diff --git a/source/java/org/alfresco/encryption/EncryptionKeysRegistryImpl.java b/source/java/org/alfresco/encryption/EncryptionKeysRegistryImpl.java
index a63bdd3bbc..d04c15b08b 100644
--- a/source/java/org/alfresco/encryption/EncryptionKeysRegistryImpl.java
+++ b/source/java/org/alfresco/encryption/EncryptionKeysRegistryImpl.java
@@ -1,208 +1,208 @@
-package org.alfresco.encryption;
-
-import java.io.Serializable;
-import java.security.InvalidKeyException;
-import java.security.Key;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
-import org.alfresco.repo.transaction.RetryingTransactionHelper;
-import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
-import org.alfresco.service.cmr.attributes.AttributeService;
-import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
-import org.alfresco.service.transaction.TransactionService;
-import org.alfresco.util.EqualsHelper;
-import org.alfresco.util.GUID;
-import org.alfresco.util.Pair;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Registered Encryption Keys are stored in the AttributeService directly under a top level key defined by
- * TOP_LEVEL_KEY (which means that all key aliases must be unique across however many keystores are being used).
- *
- * @since 4.0
- *
- */
-// TODO caching? This will probably not be used extensively.
-// TODO instead of persisting the Pair when registering a key, create two attributes per key (one for the
-// guid and one for the encrypted value of the guid). This means a custom class does not need to be bound to
-// the attribute service.
-public class EncryptionKeysRegistryImpl implements EncryptionKeysRegistry
-{
- public static String TOP_LEVEL_KEY = "keyCheck";
- private static final Log logger = LogFactory.getLog(EncryptionKeysRegistryImpl.class);
-
- private TransactionService transactionService;
- private AttributeService attributeService;
- private String cipherAlgorithm;
- private String cipherProvider;
-
- public void setAttributeService(AttributeService attributeService)
- {
- this.attributeService = attributeService;
- }
-
- public void setCipherAlgorithm(String cipherAlgorithm)
- {
- this.cipherAlgorithm = cipherAlgorithm;
- }
-
- public void setCipherProvider(String cipherProvider)
- {
- this.cipherProvider = cipherProvider;
- }
-
- public void setTransactionService(TransactionService transactionService)
- {
- this.transactionService = transactionService;
- }
-
- protected Encryptor getEncryptor(final KeyMap keys)
- {
- DefaultEncryptor encryptor = new DefaultEncryptor();
- encryptor.setCipherAlgorithm(cipherAlgorithm);
- encryptor.setCipherProvider(cipherProvider);
- encryptor.setKeyProvider(new KeyProvider()
- {
- @Override
- public Key getKey(String keyAlias)
- {
- return keys.getCachedKey(keyAlias).getKey();
- }
- });
- return encryptor;
- }
-
- public void init()
- {
- }
-
- public void registerKey(String keyAlias, Key key)
- {
- if(isKeyRegistered(keyAlias))
- {
- throw new IllegalArgumentException("Key " + keyAlias + " is already registered");
- }
-
- // register the key by creating an attribute that stores a guid and its encrypted value
- String guid = GUID.generate();
-
- KeyMap keys = new KeyMap();
- keys.setKey(keyAlias, key);
- Encryptor encryptor = getEncryptor(keys);
- Serializable encrypted = encryptor.sealObject(keyAlias, null, guid);
- Pair keyCheck = new Pair(guid, encrypted);
- attributeService.createAttribute(keyCheck, TOP_LEVEL_KEY, keyAlias);
- logger.info("Registered key " + keyAlias);
- }
-
- public void unregisterKey(String keyAlias)
- {
- attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
- }
-
- public boolean isKeyRegistered(String keyAlias)
- {
- try
- {
- return (attributeService.getAttribute(TOP_LEVEL_KEY, keyAlias) != null);
- }
- catch(Throwable e)
- {
- // there is an issue getting the attribute. Remove it.
- attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
- return (attributeService.getAttribute(TOP_LEVEL_KEY, keyAlias) != null);
- }
- }
-
- public List getRegisteredKeys(final Set keyStoreKeys)
- {
- final List registeredKeys = new ArrayList();
-
- attributeService.getAttributes(new AttributeQueryCallback()
- {
- public boolean handleAttribute(Long id, Serializable value,
- Serializable[] keys)
- {
- // Add as a registered key if the keystore contains the key
- String keyAlias = (String)keys[1];
- if(keyStoreKeys.contains(keyAlias))
- {
- registeredKeys.add(keyAlias);
- }
- return true;
- }
-
- },
- TOP_LEVEL_KEY);
-
- return registeredKeys;
- }
-
- @SuppressWarnings("unchecked")
- public KEY_STATUS checkKey(String keyAlias, Key key)
- {
- Pair keyCheck = null;
-
- if(attributeService.exists(TOP_LEVEL_KEY, keyAlias))
- {
- try
- {
- // check that the key has not changed by decrypting the encrypted guid attribute
- // comparing against the guid
- try
- {
- keyCheck = (Pair)attributeService.getAttribute(TOP_LEVEL_KEY, keyAlias);
- }
- catch(Throwable e)
- {
- // there is an issue getting the attribute. Remove it.
- attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
- return KEY_STATUS.MISSING;
- }
-
- if(keyCheck == null)
- {
- return KEY_STATUS.MISSING;
- }
-
- KeyMap keys = new KeyMap();
- keys.setKey(keyAlias, key);
- Encryptor encryptor = getEncryptor(keys);
- Serializable storedGUID = encryptor.unsealObject(keyAlias, keyCheck.getSecond());
- return EqualsHelper.nullSafeEquals(storedGUID, keyCheck.getFirst()) ? KEY_STATUS.OK : KEY_STATUS.CHANGED;
- }
- catch(InvalidKeyException e)
- {
- // key exception indicates that the key has changed - it can't decrypt the
- // previously-encrypted data
- return KEY_STATUS.CHANGED;
- }
- }
- else
- {
- return KEY_STATUS.MISSING;
- }
- }
-
- // note that this removes _all_ keys in the keystore. Use with care.
- public void removeRegisteredKeys(final Set keys)
- {
- RetryingTransactionHelper retryingTransactionHelper = transactionService.getRetryingTransactionHelper();
- final RetryingTransactionCallback removeKeysCallback = new RetryingTransactionCallback()
- {
- public Void execute() throws Throwable
- {
- for(String keyAlias : keys)
- {
- attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
- }
-
- return null;
- }
- };
- retryingTransactionHelper.doInTransaction(removeKeysCallback, false);
- }
-}
+package org.alfresco.encryption;
+
+import java.io.Serializable;
+import java.security.InvalidKeyException;
+import java.security.Key;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import org.alfresco.repo.transaction.RetryingTransactionHelper;
+import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
+import org.alfresco.service.cmr.attributes.AttributeService;
+import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
+import org.alfresco.service.transaction.TransactionService;
+import org.alfresco.util.EqualsHelper;
+import org.alfresco.util.GUID;
+import org.alfresco.util.Pair;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Registered Encryption Keys are stored in the AttributeService directly under a top level key defined by
+ * TOP_LEVEL_KEY (which means that all key aliases must be unique across however many keystores are being used).
+ *
+ * @since 4.0
+ *
+ */
+// TODO caching? This will probably not be used extensively.
+// TODO instead of persisting the Pair when registering a key, create two attributes per key (one for the
+// guid and one for the encrypted value of the guid). This means a custom class does not need to be bound to
+// the attribute service.
+public class EncryptionKeysRegistryImpl implements EncryptionKeysRegistry
+{
+ public static String TOP_LEVEL_KEY = "keyCheck";
+ private static final Log logger = LogFactory.getLog(EncryptionKeysRegistryImpl.class);
+
+ private TransactionService transactionService;
+ private AttributeService attributeService;
+ private String cipherAlgorithm;
+ private String cipherProvider;
+
+ public void setAttributeService(AttributeService attributeService)
+ {
+ this.attributeService = attributeService;
+ }
+
+ public void setCipherAlgorithm(String cipherAlgorithm)
+ {
+ this.cipherAlgorithm = cipherAlgorithm;
+ }
+
+ public void setCipherProvider(String cipherProvider)
+ {
+ this.cipherProvider = cipherProvider;
+ }
+
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ }
+
+ protected Encryptor getEncryptor(final KeyMap keys)
+ {
+ DefaultEncryptor encryptor = new DefaultEncryptor();
+ encryptor.setCipherAlgorithm(cipherAlgorithm);
+ encryptor.setCipherProvider(cipherProvider);
+ encryptor.setKeyProvider(new KeyProvider()
+ {
+ @Override
+ public Key getKey(String keyAlias)
+ {
+ return keys.getCachedKey(keyAlias).getKey();
+ }
+ });
+ return encryptor;
+ }
+
+ public void init()
+ {
+ }
+
+ public void registerKey(String keyAlias, Key key)
+ {
+ if(isKeyRegistered(keyAlias))
+ {
+ throw new IllegalArgumentException("Key " + keyAlias + " is already registered");
+ }
+
+ // register the key by creating an attribute that stores a guid and its encrypted value
+ String guid = GUID.generate();
+
+ KeyMap keys = new KeyMap();
+ keys.setKey(keyAlias, key);
+ Encryptor encryptor = getEncryptor(keys);
+ Serializable encrypted = encryptor.sealObject(keyAlias, null, guid);
+ Pair keyCheck = new Pair(guid, encrypted);
+ attributeService.createAttribute(keyCheck, TOP_LEVEL_KEY, keyAlias);
+ logger.info("Registered key " + keyAlias);
+ }
+
+ public void unregisterKey(String keyAlias)
+ {
+ attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
+ }
+
+ public boolean isKeyRegistered(String keyAlias)
+ {
+ try
+ {
+ return (attributeService.getAttribute(TOP_LEVEL_KEY, keyAlias) != null);
+ }
+ catch(Throwable e)
+ {
+ // there is an issue getting the attribute. Remove it.
+ attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
+ return (attributeService.getAttribute(TOP_LEVEL_KEY, keyAlias) != null);
+ }
+ }
+
+ public List getRegisteredKeys(final Set keyStoreKeys)
+ {
+ final List registeredKeys = new ArrayList();
+
+ attributeService.getAttributes(new AttributeQueryCallback()
+ {
+ public boolean handleAttribute(Long id, Serializable value,
+ Serializable[] keys)
+ {
+ // Add as a registered key if the keystore contains the key
+ String keyAlias = (String)keys[1];
+ if(keyStoreKeys.contains(keyAlias))
+ {
+ registeredKeys.add(keyAlias);
+ }
+ return true;
+ }
+
+ },
+ TOP_LEVEL_KEY);
+
+ return registeredKeys;
+ }
+
+ @SuppressWarnings("unchecked")
+ public KEY_STATUS checkKey(String keyAlias, Key key)
+ {
+ Pair keyCheck = null;
+
+ if(attributeService.exists(TOP_LEVEL_KEY, keyAlias))
+ {
+ try
+ {
+ // check that the key has not changed by decrypting the encrypted guid attribute
+ // comparing against the guid
+ try
+ {
+ keyCheck = (Pair)attributeService.getAttribute(TOP_LEVEL_KEY, keyAlias);
+ }
+ catch(Throwable e)
+ {
+ // there is an issue getting the attribute. Remove it.
+ attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
+ return KEY_STATUS.MISSING;
+ }
+
+ if(keyCheck == null)
+ {
+ return KEY_STATUS.MISSING;
+ }
+
+ KeyMap keys = new KeyMap();
+ keys.setKey(keyAlias, key);
+ Encryptor encryptor = getEncryptor(keys);
+ Serializable storedGUID = encryptor.unsealObject(keyAlias, keyCheck.getSecond());
+ return EqualsHelper.nullSafeEquals(storedGUID, keyCheck.getFirst()) ? KEY_STATUS.OK : KEY_STATUS.CHANGED;
+ }
+ catch(InvalidKeyException e)
+ {
+ // key exception indicates that the key has changed - it can't decrypt the
+ // previously-encrypted data
+ return KEY_STATUS.CHANGED;
+ }
+ }
+ else
+ {
+ return KEY_STATUS.MISSING;
+ }
+ }
+
+ // note that this removes _all_ keys in the keystore. Use with care.
+ public void removeRegisteredKeys(final Set keys)
+ {
+ RetryingTransactionHelper retryingTransactionHelper = transactionService.getRetryingTransactionHelper();
+ final RetryingTransactionCallback removeKeysCallback = new RetryingTransactionCallback()
+ {
+ public Void execute() throws Throwable
+ {
+ for(String keyAlias : keys)
+ {
+ attributeService.removeAttribute(TOP_LEVEL_KEY, keyAlias);
+ }
+
+ return null;
+ }
+ };
+ retryingTransactionHelper.doInTransaction(removeKeysCallback, false);
+ }
+}
diff --git a/source/java/org/alfresco/encryption/KeyStoreChecker.java b/source/java/org/alfresco/encryption/KeyStoreChecker.java
index 78be80a2a0..613f63f9e6 100644
--- a/source/java/org/alfresco/encryption/KeyStoreChecker.java
+++ b/source/java/org/alfresco/encryption/KeyStoreChecker.java
@@ -1,30 +1,30 @@
-package org.alfresco.encryption;
-
-/**
- * Checks the repository key stores.
- *
- * @since 4.0
- *
- */
-public class KeyStoreChecker
-{
- private AlfrescoKeyStore mainKeyStore;
-
- public KeyStoreChecker()
- {
- }
-
- public void setMainKeyStore(AlfrescoKeyStore mainKeyStore)
- {
- this.mainKeyStore = mainKeyStore;
- }
-
- public void validateKeyStores() throws InvalidKeystoreException, MissingKeyException
- {
- mainKeyStore.validateKeys();
- if(!mainKeyStore.exists())
- {
- mainKeyStore.create();
- }
- }
-}
+package org.alfresco.encryption;
+
+/**
+ * Checks the repository key stores.
+ *
+ * @since 4.0
+ *
+ */
+public class KeyStoreChecker
+{
+ private AlfrescoKeyStore mainKeyStore;
+
+ public KeyStoreChecker()
+ {
+ }
+
+ public void setMainKeyStore(AlfrescoKeyStore mainKeyStore)
+ {
+ this.mainKeyStore = mainKeyStore;
+ }
+
+ public void validateKeyStores() throws InvalidKeystoreException, MissingKeyException
+ {
+ mainKeyStore.validateKeys();
+ if(!mainKeyStore.exists())
+ {
+ mainKeyStore.create();
+ }
+ }
+}
diff --git a/source/java/org/alfresco/encryption/ReEncryptor.java b/source/java/org/alfresco/encryption/ReEncryptor.java
index 5ecb10a9f6..317891a2a0 100644
--- a/source/java/org/alfresco/encryption/ReEncryptor.java
+++ b/source/java/org/alfresco/encryption/ReEncryptor.java
@@ -1,335 +1,335 @@
-package org.alfresco.encryption;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
-import javax.crypto.SealedObject;
-
-import org.alfresco.repo.batch.BatchProcessWorkProvider;
-import org.alfresco.repo.batch.BatchProcessor;
-import org.alfresco.repo.dictionary.DictionaryDAO;
-import org.alfresco.repo.domain.node.NodeDAO;
-import org.alfresco.repo.domain.node.NodePropertyEntity;
-import org.alfresco.repo.domain.node.NodePropertyKey;
-import org.alfresco.repo.domain.node.NodePropertyValue;
-import org.alfresco.repo.domain.qname.QNameDAO;
-import org.alfresco.repo.lock.JobLockService;
-import org.alfresco.repo.lock.LockAcquisitionException;
-import org.alfresco.repo.node.encryption.MetadataEncryptor;
-import org.alfresco.repo.transaction.RetryingTransactionHelper;
-import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
-import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
-import org.alfresco.service.cmr.dictionary.PropertyDefinition;
-import org.alfresco.service.namespace.NamespaceService;
-import org.alfresco.service.namespace.QName;
-import org.alfresco.service.transaction.TransactionService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.springframework.beans.BeansException;
-import org.springframework.context.ApplicationContext;
-import org.springframework.context.ApplicationContextAware;
-
-/**
- * Re-encrypts encryptable repository properties using a new set of encryption keys.
- * Decrypts the repository properties using the default encryptor, falling back to
- * a backup decryptor (using the old encryption keys) if necessary, and then re-encrypts
- * the properties.
- *
- * Can run in one of two ways:
- *
- *
- *
- *    <li>during bootstrap.
- *
- *    <li>by using JMX (available only to Enterprise). In this case, the system can stay running while the re-encryption takes place.
- *
- *
- * @since 4.0
- */
-public class ReEncryptor implements ApplicationContextAware
-{
- private static Log logger = LogFactory.getLog(ReEncryptor.class);
-
- private NodeDAO nodeDAO;
- private DictionaryDAO dictionaryDAO;
- private QNameDAO qnameDAO;
-
- private MetadataEncryptor metadataEncryptor;
-
- private ApplicationContext applicationContext;
- private TransactionService transactionService;
- private RetryingTransactionHelper transactionHelper;
-
- private int numThreads;
- private int chunkSize;
- private boolean splitTxns = true;
-
- private static final QName LOCK = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "OrphanReaper");
- private JobLockService jobLockService;
-
- /**
- * Set the transaction provider so that each execution can be performed within a transaction
- */
- public void setTransactionService(TransactionService transactionService)
- {
- this.transactionService = transactionService;
- this.transactionHelper = transactionService.getRetryingTransactionHelper();
- }
-
- public void setMetadataEncryptor(MetadataEncryptor metadataEncryptor)
- {
- this.metadataEncryptor = metadataEncryptor;
- }
-
- public MetadataEncryptor getMetadataEncryptor()
- {
- return metadataEncryptor;
- }
-
- public void setJobLockService(JobLockService jobLockService)
- {
- this.jobLockService = jobLockService;
- }
-
- public void setNumThreads(int numThreads)
- {
- this.numThreads = numThreads;
- }
-
- public void setChunkSize(int chunkSize)
- {
- this.chunkSize = chunkSize;
- }
-
- public void setSplitTxns(boolean splitTxns)
- {
- this.splitTxns = splitTxns;
- }
-
- public void setNodeDAO(NodeDAO nodeDAO)
- {
- this.nodeDAO = nodeDAO;
- }
-
- public void setDictionaryDAO(DictionaryDAO dictionaryDAO)
- {
- this.dictionaryDAO = dictionaryDAO;
- }
-
- public void setQnameDAO(QNameDAO qnameDAO)
- {
- this.qnameDAO = qnameDAO;
- }
-
- /**
- * Attempts to get the lock. If the lock couldn't be taken, then null is returned.
- *
- * @return Returns the lock token or null
- */
- private String getLock(long time)
- {
- try
- {
- return jobLockService.getLock(LOCK, time);
- }
- catch (LockAcquisitionException e)
- {
- return null;
- }
- }
-
- /**
- * Attempts to get the lock. If it fails, the current transaction is marked for rollback.
- */
- private void refreshLock(String lockToken, long time)
- {
- if (lockToken == null)
- {
- throw new IllegalArgumentException("Must provide existing lockToken");
- }
- jobLockService.refreshLock(lockToken, LOCK, time);
- }
-
- protected void reEncryptProperties(final List properties, final String lockToken)
- {
- final Iterator it = properties.iterator();
-
- // TODO use BatchProcessWorkerAdaptor?
-
- BatchProcessor.BatchProcessWorker worker = new BatchProcessor.BatchProcessWorker()
- {
- public String getIdentifier(NodePropertyEntity entity)
- {
- return String.valueOf(entity.getNodeId());
- }
-
- public void beforeProcess() throws Throwable
- {
- refreshLock(lockToken, chunkSize * 100L);
- }
-
- public void afterProcess() throws Throwable
- {
- }
-
- public void process(final NodePropertyEntity entity) throws Throwable
- {
- NodePropertyValue nodePropValue = entity.getValue();
- // TODO check that we have the correct type i.e. can be cast to Serializable
- Serializable value = nodePropValue.getSerializableValue();
- if(value instanceof SealedObject)
- {
- SealedObject sealed = (SealedObject)value;
-
- NodePropertyKey propertyKey = entity.getKey();
- QName propertyQName = qnameDAO.getQName(propertyKey.getQnameId()).getSecond();
-
- // decrypt...
- Serializable decrypted = metadataEncryptor.decrypt(propertyQName, sealed);
-
- // ...and then re-encrypt. The new key will be used.
- Serializable resealed = metadataEncryptor.encrypt(propertyQName, decrypted);
-
- // TODO update resealed using batch update?
- // does the node DAO do batch updating?
- nodeDAO.setNodeProperties(entity.getNodeId(), Collections.singletonMap(propertyQName, resealed));
- }
- else
- {
- NodePropertyKey nodeKey = entity.getKey();
- QName propertyQName = qnameDAO.getQName(nodeKey.getQnameId()).getSecond();
- logger.warn("Encountered an encrypted property that is not a SealedObject, for node id " +
- entity.getNodeId() + ", property " + propertyQName);
- }
- }
- };
-
- BatchProcessWorkProvider provider = new BatchProcessWorkProvider()
- {
- @Override
- public int getTotalEstimatedWorkSize()
- {
- return properties.size();
- }
-
- @Override
- public Collection getNextWork()
- {
- List sublist = new ArrayList(chunkSize);
-
- synchronized(it)
- {
- int count = 0;
- while(it.hasNext() && count < chunkSize)
- {
- sublist.add(it.next());
- count++;
- }
- }
-
- return sublist;
- }
- };
-
- new BatchProcessor(
- "Reencryptor",
- transactionHelper,
- provider,
- numThreads, chunkSize,
- applicationContext,
- logger, 100).process(worker, splitTxns);
- }
-
- /**
- * Re-encrypt using the configured backup keystore to decrypt and the main keystore to encrypt
- */
- public int bootstrapReEncrypt() throws MissingKeyException
- {
- if(!metadataEncryptor.backupKeyAvailable(KeyProvider.ALIAS_METADATA))
- {
- throw new MissingKeyException("Backup key store is either not present or does not contain a metadata encryption key");
- }
- return reEncrypt();
- }
-
- /**
- * Re-encrypt by decrypting using the configured keystore and encrypting using a keystore configured using the provided new key store parameters.
- * Called from e.g. JMX.
- *
- * Assumes that the main key store has been already been reloaded.
- *
- * Note: it is the responsibility of the end user to ensure that the underlying keystores have been set up appropriately
- * i.e. the old key store is backed up to the location defined by the property '${dir.keystore}/backup-keystore' and the new
- * key store replaces it. This can be done while the repository is running.
- */
- public int reEncrypt() throws MissingKeyException
- {
- if(!metadataEncryptor.keyAvailable(KeyProvider.ALIAS_METADATA))
- {
- throw new MissingKeyException("Main key store is either not present or does not contain a metadata encryption key");
- }
- if(!metadataEncryptor.backupKeyAvailable(KeyProvider.ALIAS_METADATA))
- {
- throw new MissingKeyException("Backup key store is either not present or does not contain a metadata encryption key");
- }
-
- int numProps = reEncryptImpl();
- return numProps;
- }
-
- protected int reEncryptImpl()
- {
- // Take out a re-encryptor lock
- RetryingTransactionCallback txnWork = new RetryingTransactionCallback()
- {
- public String execute() throws Exception
- {
- String lockToken = getLock(20000L);
- return lockToken;
- }
- };
-
- String lockToken = transactionService.getRetryingTransactionHelper().doInTransaction(txnWork, false, true);
- if(lockToken == null)
- {
- logger.warn("Can't get lock. Assume multiple re-encryptors ...");
- return 0;
- }
-
- // get encrypted properties
- Collection propertyDefs = dictionaryDAO.getPropertiesOfDataType(DataTypeDefinition.ENCRYPTED);
- Set qnames = new HashSet();
- for(PropertyDefinition propDef : propertyDefs)
- {
- qnames.add(propDef.getName());
- }
-
- // TODO use callback mechanism, or select based on set of nodes?
- List properties = nodeDAO.selectNodePropertiesByTypes(qnames);
-
- if(logger.isDebugEnabled())
- {
- logger.debug("Found " + properties.size() + " properties to re-encrypt...");
- }
-
- // reencrypt these properties TODO don't call if num props == 0
- reEncryptProperties(properties, lockToken);
-
- if(logger.isDebugEnabled())
- {
- logger.debug("...done re-encrypting.");
- }
-
- return properties.size();
- }
-
- @Override
- public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
- {
- this.applicationContext = applicationContext;
- }
-}
+package org.alfresco.encryption;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import javax.crypto.SealedObject;
+
+import org.alfresco.repo.batch.BatchProcessWorkProvider;
+import org.alfresco.repo.batch.BatchProcessor;
+import org.alfresco.repo.dictionary.DictionaryDAO;
+import org.alfresco.repo.domain.node.NodeDAO;
+import org.alfresco.repo.domain.node.NodePropertyEntity;
+import org.alfresco.repo.domain.node.NodePropertyKey;
+import org.alfresco.repo.domain.node.NodePropertyValue;
+import org.alfresco.repo.domain.qname.QNameDAO;
+import org.alfresco.repo.lock.JobLockService;
+import org.alfresco.repo.lock.LockAcquisitionException;
+import org.alfresco.repo.node.encryption.MetadataEncryptor;
+import org.alfresco.repo.transaction.RetryingTransactionHelper;
+import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.dictionary.PropertyDefinition;
+import org.alfresco.service.namespace.NamespaceService;
+import org.alfresco.service.namespace.QName;
+import org.alfresco.service.transaction.TransactionService;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+
+/**
+ * Re-encrypts encryptable repository properties using a new set of encryption keys.
+ * Decrypts the repository properties using the default encryptor, falling back to
+ * a backup decryptor (using the old encryption keys) if necessary, and then re-encrypts
+ * the properties.
+ *
+ * Can run in one of two ways:
+ *
+ *
+ *
+ *    <li>during bootstrap.
+ *
+ *    <li>by using JMX (available only to Enterprise). In this case, the system can stay running while the re-encryption takes place.
+ *
+ *
+ * @since 4.0
+ */
+public class ReEncryptor implements ApplicationContextAware
+{
+ private static Log logger = LogFactory.getLog(ReEncryptor.class);
+
+ private NodeDAO nodeDAO;
+ private DictionaryDAO dictionaryDAO;
+ private QNameDAO qnameDAO;
+
+ private MetadataEncryptor metadataEncryptor;
+
+ private ApplicationContext applicationContext;
+ private TransactionService transactionService;
+ private RetryingTransactionHelper transactionHelper;
+
+ private int numThreads;
+ private int chunkSize;
+ private boolean splitTxns = true;
+
+ private static final QName LOCK = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "OrphanReaper");
+ private JobLockService jobLockService;
+
+ /**
+ * Set the transaction provider so that each execution can be performed within a transaction
+ */
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ this.transactionHelper = transactionService.getRetryingTransactionHelper();
+ }
+
+ public void setMetadataEncryptor(MetadataEncryptor metadataEncryptor)
+ {
+ this.metadataEncryptor = metadataEncryptor;
+ }
+
+ public MetadataEncryptor getMetadataEncryptor()
+ {
+ return metadataEncryptor;
+ }
+
+ public void setJobLockService(JobLockService jobLockService)
+ {
+ this.jobLockService = jobLockService;
+ }
+
+ public void setNumThreads(int numThreads)
+ {
+ this.numThreads = numThreads;
+ }
+
+ public void setChunkSize(int chunkSize)
+ {
+ this.chunkSize = chunkSize;
+ }
+
+ public void setSplitTxns(boolean splitTxns)
+ {
+ this.splitTxns = splitTxns;
+ }
+
+ public void setNodeDAO(NodeDAO nodeDAO)
+ {
+ this.nodeDAO = nodeDAO;
+ }
+
+ public void setDictionaryDAO(DictionaryDAO dictionaryDAO)
+ {
+ this.dictionaryDAO = dictionaryDAO;
+ }
+
+ public void setQnameDAO(QNameDAO qnameDAO)
+ {
+ this.qnameDAO = qnameDAO;
+ }
+
+ /**
+ * Attempts to get the lock. If the lock couldn't be taken, then null is returned.
+ *
+ * @return Returns the lock token or null
+ */
+ private String getLock(long time)
+ {
+ try
+ {
+ return jobLockService.getLock(LOCK, time);
+ }
+ catch (LockAcquisitionException e)
+ {
+ return null;
+ }
+ }
+
+ /**
+ * Attempts to get the lock. If it fails, the current transaction is marked for rollback.
+ */
+ private void refreshLock(String lockToken, long time)
+ {
+ if (lockToken == null)
+ {
+ throw new IllegalArgumentException("Must provide existing lockToken");
+ }
+ jobLockService.refreshLock(lockToken, LOCK, time);
+ }
+
+ protected void reEncryptProperties(final List properties, final String lockToken)
+ {
+ final Iterator it = properties.iterator();
+
+ // TODO use BatchProcessWorkerAdaptor?
+
+ BatchProcessor.BatchProcessWorker worker = new BatchProcessor.BatchProcessWorker()
+ {
+ public String getIdentifier(NodePropertyEntity entity)
+ {
+ return String.valueOf(entity.getNodeId());
+ }
+
+ public void beforeProcess() throws Throwable
+ {
+ refreshLock(lockToken, chunkSize * 100L);
+ }
+
+ public void afterProcess() throws Throwable
+ {
+ }
+
+ public void process(final NodePropertyEntity entity) throws Throwable
+ {
+ NodePropertyValue nodePropValue = entity.getValue();
+ // TODO check that we have the correct type i.e. can be cast to Serializable
+ Serializable value = nodePropValue.getSerializableValue();
+ if(value instanceof SealedObject)
+ {
+ SealedObject sealed = (SealedObject)value;
+
+ NodePropertyKey propertyKey = entity.getKey();
+ QName propertyQName = qnameDAO.getQName(propertyKey.getQnameId()).getSecond();
+
+ // decrypt...
+ Serializable decrypted = metadataEncryptor.decrypt(propertyQName, sealed);
+
+ // ...and then re-encrypt. The new key will be used.
+ Serializable resealed = metadataEncryptor.encrypt(propertyQName, decrypted);
+
+ // TODO update resealed using batch update?
+ // does the node DAO do batch updating?
+ nodeDAO.setNodeProperties(entity.getNodeId(), Collections.singletonMap(propertyQName, resealed));
+ }
+ else
+ {
+ NodePropertyKey nodeKey = entity.getKey();
+ QName propertyQName = qnameDAO.getQName(nodeKey.getQnameId()).getSecond();
+ logger.warn("Encountered an encrypted property that is not a SealedObject, for node id " +
+ entity.getNodeId() + ", property " + propertyQName);
+ }
+ }
+ };
+
+ BatchProcessWorkProvider provider = new BatchProcessWorkProvider()
+ {
+ @Override
+ public int getTotalEstimatedWorkSize()
+ {
+ return properties.size();
+ }
+
+ @Override
+ public Collection getNextWork()
+ {
+ List sublist = new ArrayList(chunkSize);
+
+ synchronized(it)
+ {
+ int count = 0;
+ while(it.hasNext() && count < chunkSize)
+ {
+ sublist.add(it.next());
+ count++;
+ }
+ }
+
+ return sublist;
+ }
+ };
+
+ new BatchProcessor(
+ "Reencryptor",
+ transactionHelper,
+ provider,
+ numThreads, chunkSize,
+ applicationContext,
+ logger, 100).process(worker, splitTxns);
+ }
+
+ /**
+ * Re-encrypt using the configured backup keystore to decrypt and the main keystore to encrypt
+ */
+ public int bootstrapReEncrypt() throws MissingKeyException
+ {
+ if(!metadataEncryptor.backupKeyAvailable(KeyProvider.ALIAS_METADATA))
+ {
+ throw new MissingKeyException("Backup key store is either not present or does not contain a metadata encryption key");
+ }
+ return reEncrypt();
+ }
+
+ /**
+ * Re-encrypt by decrypting using the configured keystore and encrypting using a keystore configured using the provided new key store parameters.
+ * Called from e.g. JMX.
+ *
+ * Assumes that the main key store has already been reloaded.
+ *
+ * Note: it is the responsibility of the end user to ensure that the underlying keystores have been set up appropriately
+ * i.e. the old key store is backed up to the location defined by the property '${dir.keystore}/backup-keystore' and the new
+ * key store replaces it. This can be done while the repository is running.
+ */
+ public int reEncrypt() throws MissingKeyException
+ {
+ if(!metadataEncryptor.keyAvailable(KeyProvider.ALIAS_METADATA))
+ {
+ throw new MissingKeyException("Main key store is either not present or does not contain a metadata encryption key");
+ }
+ if(!metadataEncryptor.backupKeyAvailable(KeyProvider.ALIAS_METADATA))
+ {
+ throw new MissingKeyException("Backup key store is either not present or does not contain a metadata encryption key");
+ }
+
+ int numProps = reEncryptImpl();
+ return numProps;
+ }
+
+ protected int reEncryptImpl()
+ {
+ // Take out a re-encryptor lock
+ RetryingTransactionCallback txnWork = new RetryingTransactionCallback()
+ {
+ public String execute() throws Exception
+ {
+ String lockToken = getLock(20000L);
+ return lockToken;
+ }
+ };
+
+ String lockToken = transactionService.getRetryingTransactionHelper().doInTransaction(txnWork, false, true);
+ if(lockToken == null)
+ {
+ logger.warn("Can't get lock. Assume multiple re-encryptors ...");
+ return 0;
+ }
+
+ // get encrypted properties
+ Collection propertyDefs = dictionaryDAO.getPropertiesOfDataType(DataTypeDefinition.ENCRYPTED);
+ Set qnames = new HashSet();
+ for(PropertyDefinition propDef : propertyDefs)
+ {
+ qnames.add(propDef.getName());
+ }
+
+ // TODO use callback mechanism, or select based on set of nodes?
+ List properties = nodeDAO.selectNodePropertiesByTypes(qnames);
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("Found " + properties.size() + " properties to re-encrypt...");
+ }
+
+ // reencrypt these properties TODO don't call if num props == 0
+ reEncryptProperties(properties, lockToken);
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("...done re-encrypting.");
+ }
+
+ return properties.size();
+ }
+
+ @Override
+ public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
+ {
+ this.applicationContext = applicationContext;
+ }
+}
diff --git a/source/java/org/alfresco/filesys/alfresco/NetworkFileLegacyReferenceCount.java b/source/java/org/alfresco/filesys/alfresco/NetworkFileLegacyReferenceCount.java
index e933c2b43a..752c16d391 100644
--- a/source/java/org/alfresco/filesys/alfresco/NetworkFileLegacyReferenceCount.java
+++ b/source/java/org/alfresco/filesys/alfresco/NetworkFileLegacyReferenceCount.java
@@ -1,29 +1,29 @@
-package org.alfresco.filesys.alfresco;
-
-/**
- * Does this NetworkFile have reference counting?
- */
-public interface NetworkFileLegacyReferenceCount
-{
- /**
- * Increment the file open count, first open = 1;
- *
- * @return the current open count
- */
- public int incrementLegacyOpenCount();
-
- /**
- * Decrement the file open count
- *
- * @return the current open count
- */
- public int decrementLagacyOpenCount();
-
- /**
- * Return the open file count
- *
- * @return the current open count
- */
- public int getLegacyOpenCount();
-
-}
+package org.alfresco.filesys.alfresco;
+
+/**
+ * Does this NetworkFile have reference counting?
+ */
+public interface NetworkFileLegacyReferenceCount
+{
+ /**
+ * Increment the file open count, first open = 1;
+ *
+ * @return the current open count
+ */
+ public int incrementLegacyOpenCount();
+
+ /**
+ * Decrement the file open count
+ *
+ * @return the current open count
+ */
+ public int decrementLagacyOpenCount();
+
+ /**
+ * Return the open file count
+ *
+ * @return the current open count
+ */
+ public int getLegacyOpenCount();
+
+}
diff --git a/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlay.java b/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlay.java
index c99a03bf92..1747445063 100644
--- a/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlay.java
+++ b/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlay.java
@@ -1,44 +1,44 @@
-package org.alfresco.filesys.alfresco;
-
-import org.alfresco.jlan.server.filesys.pseudo.PseudoFile;
-import org.alfresco.jlan.server.filesys.pseudo.PseudoFileList;
-import org.alfresco.service.cmr.repository.NodeRef;
-
-public interface PseudoFileOverlay
-{
- /**
- * Is this a pseudo file?
- * @param parentDir NodeRef
- * @param name String
- * @return true the file is a pseudo file
- */
- public boolean isPseudoFile(NodeRef parentDir, String name);
-
- /**
- * Get the pseudo file
- * @param parentDir NodeRef
- * @param name String
- * @return the pseudoFile or null if there is no pseudo file
- */
- public PseudoFile getPseudoFile(NodeRef parentDir, String name);
-
- /**
- * Search for the pseudo files on the specified path
- * @param parentDir NodeRef
- * @param name String
- * @return list of pseudo files.
- */
- public PseudoFileList searchPseudoFiles(NodeRef parentDir, String name);
-
- /**
- * Delete a pseudo file.
- *
- * Pseudo files may need to be deleted for delete folder operations to work
- * correctly.
- *
- * A pseudo file can be deleted for a short time. However it may re-appear at some point
- * later since there is no permanent persistence of pseudo files which are ephemeral!
- */
- public void delete(NodeRef parentDir, String name);
-
-}
+package org.alfresco.filesys.alfresco;
+
+import org.alfresco.jlan.server.filesys.pseudo.PseudoFile;
+import org.alfresco.jlan.server.filesys.pseudo.PseudoFileList;
+import org.alfresco.service.cmr.repository.NodeRef;
+
+public interface PseudoFileOverlay
+{
+ /**
+ * Is this a pseudo file?
+ * @param parentDir NodeRef
+ * @param name String
+ * @return true if the file is a pseudo file
+ */
+ public boolean isPseudoFile(NodeRef parentDir, String name);
+
+ /**
+ * Get the pseudo file
+ * @param parentDir NodeRef
+ * @param name String
+ * @return the pseudoFile or null if there is no pseudo file
+ */
+ public PseudoFile getPseudoFile(NodeRef parentDir, String name);
+
+ /**
+ * Search for the pseudo files on the specified path
+ * @param parentDir NodeRef
+ * @param name String
+ * @return list of pseudo files.
+ */
+ public PseudoFileList searchPseudoFiles(NodeRef parentDir, String name);
+
+ /**
+ * Delete a pseudo file.
+ *
+ * Pseudo files may need to be deleted for delete folder operations to work
+ * correctly.
+ *
+ * A pseudo file can be deleted for a short time. However it may re-appear at some point
+ * later since there is no permanent persistence of pseudo files which are ephemeral!
+ */
+ public void delete(NodeRef parentDir, String name);
+
+}
diff --git a/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlayImpl.java b/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlayImpl.java
index 5efc9de07a..2aae41204b 100644
--- a/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlayImpl.java
+++ b/source/java/org/alfresco/filesys/alfresco/PseudoFileOverlayImpl.java
@@ -1,505 +1,505 @@
-package org.alfresco.filesys.alfresco;
-
-import java.io.Serializable;
-import java.util.Enumeration;
-import java.util.Map;
-
-import org.alfresco.filesys.repo.ContentDiskDriver2;
-import org.alfresco.jlan.server.filesys.FileInfo;
-import org.alfresco.jlan.server.filesys.FileName;
-import org.alfresco.jlan.server.filesys.pseudo.MemoryPseudoFile;
-import org.alfresco.jlan.server.filesys.pseudo.PseudoFile;
-import org.alfresco.jlan.server.filesys.pseudo.PseudoFileList;
-import org.alfresco.jlan.util.WildCard;
-import org.alfresco.model.ContentModel;
-import org.alfresco.repo.admin.SysAdminParams;
-import org.alfresco.repo.cache.SimpleCache;
-import org.alfresco.repo.site.SiteModel;
-import org.alfresco.service.cmr.repository.NodeRef;
-import org.alfresco.service.cmr.repository.NodeService;
-import org.alfresco.util.PropertyCheck;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Re-implementation of PseudoFiles for ContentDiskDriver2
- *
- * Overlays "desktop actions"
- *
- * @author mrogers
- * */
-public class PseudoFileOverlayImpl implements PseudoFileOverlay
-{
- private SysAdminParams sysAdminParams;
- private AlfrescoContext context;
- private NodeService nodeService;
- private SimpleCache deletePseudoFileCache;
-
- private static final Log logger = LogFactory.getLog(PseudoFileOverlayImpl.class);
-
- PseudoFileList pl = new PseudoFileList();
-
- public void init()
- {
- PropertyCheck.mandatory(this, "nodeService", getNodeService());
- PropertyCheck.mandatory(this, "context", context);
- PropertyCheck.mandatory(this, "sysAdminParams", sysAdminParams);
- PropertyCheck.mandatory(this, "deletePseudoFileCache", deletePseudoFileCache);
-
- DesktopActionTable actions = context.getDesktopActions();
-
- if(actions != null)
- {
- Enumeration actionNames = actions.enumerateActionNames();
-
- while(actionNames.hasMoreElements())
- {
- // Get the current desktop action
- String name = actionNames.nextElement();
- DesktopAction action = actions.getAction(name);
-
- // Add the pseudo file for the desktop action
-
- if ( action.hasPseudoFile())
- {
- PseudoFile file = action.getPseudoFile();
- pl.addFile(file);
- }
- }
- }
- }
-
- private PseudoFile generateAlfrescoURLShortcut(NodeRef nodeRef)
- {
- if ( context.isAlfrescoURLEnabled())
- {
- // Make sure the state has the associated node details
-
- // Build the URL file data
-
- StringBuilder urlStr = new StringBuilder();
-
- urlStr.append("[InternetShortcut]\r\n");
- urlStr.append("URL=");
- urlStr.append(getAlfrescoURLPrefix());
- urlStr.append("navigate/browse/workspace/SpacesStore/");
- urlStr.append( nodeRef.getId());
- urlStr.append("\r\n");
-
- // Create the in memory pseudo file for the URL link
-
- byte[] urlData = urlStr.toString().getBytes();
-
- MemoryPseudoFile urlFile = new MemoryPseudoFile( context.getURLFileName(), urlData);
- return urlFile;
- }
- return null;
- }
-
- /**
- * Return the site name if the node ref is in a document library
- * Return null if the document is not in a site
- */
- // MER URRGH - copied from IMAP service - I don't like it there either!
- private String getSiteForNode(NodeRef nodeRef)
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("get site for node:" + nodeRef);
- }
- boolean isInDocLibrary = false;
-
- NodeRef parent = nodeService.getPrimaryParent(nodeRef).getParentRef();
-
- if(nodeService.getType(parent).equals(SiteModel.TYPE_SITE))
- {
- String folderName = (String) nodeService.getProperty(nodeRef, ContentModel.PROP_NAME);
- if(folderName.equalsIgnoreCase("documentlibrary"))
- {
- isInDocLibrary = true;
- }
- }
- else
- {
- while (parent != null && !nodeService.getType(parent).equals(SiteModel.TYPE_SITE))
- {
- String parentName = (String) nodeService.getProperty(parent, ContentModel.PROP_NAME);
- if (parentName.equalsIgnoreCase("documentlibrary"))
- {
- isInDocLibrary = true;
- }
-
- parent = nodeService.getPrimaryParent(parent).getParentRef();
- }
- }
-
- if (parent == null)
- {
- logger.debug("folder is not in a site");
- return null;
- }
- else
- {
- if(isInDocLibrary)
- {
- if(nodeService.getType(parent).equals(SiteModel.TYPE_SITE))
- {
- String siteName = (String)nodeService.getProperty(parent, ContentModel.PROP_NAME);
- if(logger.isDebugEnabled())
- {
- logger.debug("got a site:" + siteName);
- }
- return siteName;
- }
- }
- logger.debug("folder is not in doc library");
-
- return null;
- }
-
- }
-
- private PseudoFile generateShareURLShortcut(NodeRef nodeRef)
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("generateShareURLShortcut nodeRef" + nodeRef);
- }
- if ( context.isShareURLEnabled())
- {
- String site = getSiteForNode(nodeRef);
- if(site != null)
- {
- // Make sure the state has the associated node details
- // Build the URL file data
-
- StringBuilder urlStr = new StringBuilder();
-
-// This is the URL generated
-// http://markr:8080/share/page/site/wibble/folder-details?nodeRef=workspace://SpacesStore/f72b2475-7571-46fe-947b-b0ee1b6a82ea
- urlStr.append("[InternetShortcut]\r\n");
- urlStr.append("URL=");
- urlStr.append(getShareURLPrefix());
- urlStr.append("page/site/");
- urlStr.append(site + "/folder-details?nodeRef=");
- urlStr.append(nodeRef.getStoreRef() + "/");
- urlStr.append( nodeRef.getId());
- urlStr.append("\r\n");
-
- // Should this be the URL instead
-// http://markr:8080/share/page/site/wibble/documentlibrary#filter=path%7C%2Ffolder%2520A%2FFolderB&page=1
-
- // Create the in memory pseudo file for the URL link
- if(logger.isDebugEnabled())
- {
- logger.debug("generateShareURLShortcut url as string:" + urlStr);
- }
- byte[] urlData = urlStr.toString().getBytes();
-
- MemoryPseudoFile urlFile = new MemoryPseudoFile( context.getShareURLFileName(), urlData);
- return urlFile;
- }
-
-
- }
- return null;
- }
-
-
- /**
- *
- */
- public boolean isPseudoFile(NodeRef parentDir, String name)
- {
- if ( parentDir == null)
- {
- return false;
- }
-
- if(context.isAlfrescoURLEnabled())
- {
- if(context.getURLFileName().equals(name))
- {
- return true;
- }
- }
-
- if(context.isShareURLEnabled())
- {
- if(context.getShareURLFileName().equals(name))
- {
- return true;
- }
- }
-
- if(getPseudoFile(parentDir, name) != null)
- {
- return true;
- }
- else
- {
- return false;
- }
- }
-
- /**
- * Get the pseudo file
- * @param parentDir NodeRef
- * @param fname String
- * @return the pseudoFile or null if there is no pseudo file
- */
- public PseudoFile getPseudoFile(NodeRef parentDir, String fname)
- {
- if ( parentDir == null)
- {
- return null;
- }
- if (isDeleted(parentDir, fname))
- {
- return null;
- }
- if(context.isAlfrescoURLEnabled())
- {
- if(context.getURLFileName().equals(fname))
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("returning URL pseudo file");
- }
- return generateAlfrescoURLShortcut(parentDir);
- }
- }
-
- if(context.isShareURLEnabled())
- {
- if(context.getShareURLFileName().equals(fname))
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("returning Share URL pseudo file");
- }
- return generateShareURLShortcut(parentDir);
- }
- }
-
- PseudoFile file = pl.findFile(fname, false);
- return file;
- }
-
- /**
- *
- */
- public PseudoFileList searchPseudoFiles(NodeRef parentDir, String name)
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("searchPseudoFile parentDir:" + parentDir +", name:" + name);
- }
- //return pseudo files matching the path/pattern
-
- if ( parentDir == null || name == null || name.length() == 0 || name.equals("\\"))
- {
- return null;
- }
-
- String fname = name;
-
- if ( fname.equals( "*.*"))
- {
- fname = "*";
- }
-
- if ( WildCard.containsWildcards(fname))
- {
- // does contain wildcards
-
- // Check if the wildcard is for all files or a subset
-
- if ( fname.equals( "*"))
- {
- // Match all pseudo files
- PseudoFileList filterList = new PseudoFileList();
-
- // copy desktop actions which do not depend on parentDir
- for ( int i = 0; i < pl.numberOfFiles(); i++)
- {
- PseudoFile pseudoFile = pl.getFileAt(i);
- if(!isDeleted(parentDir, pseudoFile.getFileName()))
- {
- // File is not deleted
- filterList.addFile(pseudoFile);
- }
- }
-
- // The URL file is dependent upon the parent dir
- if(context.isAlfrescoURLEnabled())
- {
- if(!isDeleted(parentDir, context.getURLFileName()))
- {
- filterList.addFile(generateAlfrescoURLShortcut(parentDir));
- }
- else
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("alfresco URL pseudo file deleted");
- }
- }
- }
-
- if(context.isShareURLEnabled())
- {
- if(!isDeleted(parentDir, context.getShareURLFileName()))
- {
- PseudoFile sharePseudoFile = generateShareURLShortcut(parentDir);
- if(sharePseudoFile != null)
- {
- filterList.addFile(sharePseudoFile);
- }
- }
- }
-
- return filterList;
- }
- else
- {
- // Generate a subset of pseudo files that match the wildcard search pattern
-
- WildCard wildCard = new WildCard( fname, false);
- PseudoFileList filterList = new PseudoFileList();
-
- for ( int i = 0; i < pl.numberOfFiles(); i++)
- {
- PseudoFile pseudoFile = pl.getFileAt( i);
- if ( wildCard.matchesPattern( pseudoFile.getFileName()))
- {
- if(!isDeleted(parentDir, pseudoFile.getFileName()))
- {
- // Add the pseudo file to the filtered list
- filterList.addFile( pseudoFile);
- }
- }
- }
-
- // The URL file is dependent upon the parent dir
- if(context.isAlfrescoURLEnabled())
- {
- if(wildCard.matchesPattern(context.getURLFileName()))
- {
- if(!isDeleted(parentDir, context.getURLFileName()))
- {
- filterList.addFile(generateAlfrescoURLShortcut(parentDir));
- }
- }
- }
-
- if(context.isShareURLEnabled())
- {
- if(wildCard.matchesPattern(context.getShareURLFileName()))
- {
- if(!isDeleted(parentDir, context.getShareURLFileName()))
- {
- PseudoFile sharePseudoFile = generateShareURLShortcut(parentDir);
-
- if(sharePseudoFile != null)
- {
- filterList.addFile(sharePseudoFile);
- }
- }
- }
- }
-
- return filterList;
- // Use the filtered pseudo file list, or null if there were no matches
- }
- }
- else
- {
- // does not contain wild cards
- PseudoFileList filterList = new PseudoFileList();
- PseudoFile file = getPseudoFile(parentDir, fname);
-
- if(file != null && !isDeleted(parentDir, fname))
- {
- filterList.addFile(file);
- }
-
- return filterList;
- }
- }
-
- @Override
- public void delete(NodeRef parentDir, String name)
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("delete pseudo file parentDir:" + parentDir + ", name: " + name);
- }
- getDeletePseudoFileCache().put(toDeleteKey(parentDir, name), "Deleted");
- }
-
- private String toDeleteKey(NodeRef parentNoderef, String name)
- {
- return (parentNoderef.toString() + "/" + name + ":" + context.getDeviceName()).toLowerCase();
- }
-
- private boolean isDeleted(NodeRef parentDir, String name)
- {
- String key = toDeleteKey(parentDir, name);
- boolean isDeleted = getDeletePseudoFileCache().contains(key);
- if(logger.isDebugEnabled())
- {
- logger.debug("pseudoFile isDeleted: " + isDeleted + ", for name:" + name);
- }
- return isDeleted;
- }
-
- //
- public void setNodeService(NodeService nodeService)
- {
- this.nodeService = nodeService;
- }
-
- public NodeService getNodeService()
- {
- return nodeService;
- }
-
- public void setContext(AlfrescoContext context)
- {
- this.context = context;
- }
-
- public AlfrescoContext getContext()
- {
- return context;
- }
-
- private final String getAlfrescoURLPrefix()
- {
- return sysAdminParams.getAlfrescoProtocol() + "://" + sysAdminParams.getAlfrescoHost() + ":" + sysAdminParams.getAlfrescoPort() + "/" + sysAdminParams.getAlfrescoContext() + "/";
- }
- private final String getShareURLPrefix()
- {
- return sysAdminParams.getShareProtocol() + "://" + sysAdminParams.getShareHost() + ":" + sysAdminParams.getSharePort() + "/" + sysAdminParams.getShareContext() + "/";
- }
-
- public void setSysAdminParams(SysAdminParams sysAdminParams)
- {
- this.sysAdminParams = sysAdminParams;
- }
-
- public SysAdminParams getSysAdminParams()
- {
- return sysAdminParams;
- }
-
- public SimpleCache getDeletePseudoFileCache()
- {
- return deletePseudoFileCache;
- }
-
- public void setDeletePseudoFileCache(SimpleCache deletePseudoFileCache) {
- this.deletePseudoFileCache = deletePseudoFileCache;
- }
-}
+package org.alfresco.filesys.alfresco;
+
+import java.io.Serializable;
+import java.util.Enumeration;
+import java.util.Map;
+
+import org.alfresco.filesys.repo.ContentDiskDriver2;
+import org.alfresco.jlan.server.filesys.FileInfo;
+import org.alfresco.jlan.server.filesys.FileName;
+import org.alfresco.jlan.server.filesys.pseudo.MemoryPseudoFile;
+import org.alfresco.jlan.server.filesys.pseudo.PseudoFile;
+import org.alfresco.jlan.server.filesys.pseudo.PseudoFileList;
+import org.alfresco.jlan.util.WildCard;
+import org.alfresco.model.ContentModel;
+import org.alfresco.repo.admin.SysAdminParams;
+import org.alfresco.repo.cache.SimpleCache;
+import org.alfresco.repo.site.SiteModel;
+import org.alfresco.service.cmr.repository.NodeRef;
+import org.alfresco.service.cmr.repository.NodeService;
+import org.alfresco.util.PropertyCheck;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Re-implementation of PseudoFiles for ContentDiskDriver2
+ *
+ * Overlays "desktop actions"
+ *
+ * @author mrogers
+ */
+public class PseudoFileOverlayImpl implements PseudoFileOverlay
+{
+ private SysAdminParams sysAdminParams;
+ private AlfrescoContext context;
+ private NodeService nodeService;
+ private SimpleCache deletePseudoFileCache;
+
+ private static final Log logger = LogFactory.getLog(PseudoFileOverlayImpl.class);
+
+ PseudoFileList pl = new PseudoFileList();
+
+ public void init()
+ {
+ PropertyCheck.mandatory(this, "nodeService", getNodeService());
+ PropertyCheck.mandatory(this, "context", context);
+ PropertyCheck.mandatory(this, "sysAdminParams", sysAdminParams);
+ PropertyCheck.mandatory(this, "deletePseudoFileCache", deletePseudoFileCache);
+
+ DesktopActionTable actions = context.getDesktopActions();
+
+ if(actions != null)
+ {
+ Enumeration actionNames = actions.enumerateActionNames();
+
+ while(actionNames.hasMoreElements())
+ {
+ // Get the current desktop action
+ String name = actionNames.nextElement();
+ DesktopAction action = actions.getAction(name);
+
+ // Add the pseudo file for the desktop action
+
+ if ( action.hasPseudoFile())
+ {
+ PseudoFile file = action.getPseudoFile();
+ pl.addFile(file);
+ }
+ }
+ }
+ }
+
+ private PseudoFile generateAlfrescoURLShortcut(NodeRef nodeRef)
+ {
+ if ( context.isAlfrescoURLEnabled())
+ {
+ // Make sure the state has the associated node details
+
+ // Build the URL file data
+
+ StringBuilder urlStr = new StringBuilder();
+
+ urlStr.append("[InternetShortcut]\r\n");
+ urlStr.append("URL=");
+ urlStr.append(getAlfrescoURLPrefix());
+ urlStr.append("navigate/browse/workspace/SpacesStore/");
+ urlStr.append( nodeRef.getId());
+ urlStr.append("\r\n");
+
+ // Create the in memory pseudo file for the URL link
+
+ byte[] urlData = urlStr.toString().getBytes();
+
+ MemoryPseudoFile urlFile = new MemoryPseudoFile( context.getURLFileName(), urlData);
+ return urlFile;
+ }
+ return null;
+ }
+
+ /**
+ * Return the site name if the node ref is in a document library
+ * Return null if the document is not in a site
+ */
+ // MER URRGH - copied from IMAP service - I don't like it there either!
+ private String getSiteForNode(NodeRef nodeRef)
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("get site for node:" + nodeRef);
+ }
+ boolean isInDocLibrary = false;
+
+ NodeRef parent = nodeService.getPrimaryParent(nodeRef).getParentRef();
+
+ if(nodeService.getType(parent).equals(SiteModel.TYPE_SITE))
+ {
+ String folderName = (String) nodeService.getProperty(nodeRef, ContentModel.PROP_NAME);
+ if(folderName.equalsIgnoreCase("documentlibrary"))
+ {
+ isInDocLibrary = true;
+ }
+ }
+ else
+ {
+ while (parent != null && !nodeService.getType(parent).equals(SiteModel.TYPE_SITE))
+ {
+ String parentName = (String) nodeService.getProperty(parent, ContentModel.PROP_NAME);
+ if (parentName.equalsIgnoreCase("documentlibrary"))
+ {
+ isInDocLibrary = true;
+ }
+
+ parent = nodeService.getPrimaryParent(parent).getParentRef();
+ }
+ }
+
+ if (parent == null)
+ {
+ logger.debug("folder is not in a site");
+ return null;
+ }
+ else
+ {
+ if(isInDocLibrary)
+ {
+ if(nodeService.getType(parent).equals(SiteModel.TYPE_SITE))
+ {
+ String siteName = (String)nodeService.getProperty(parent, ContentModel.PROP_NAME);
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("got a site:" + siteName);
+ }
+ return siteName;
+ }
+ }
+ logger.debug("folder is not in doc library");
+
+ return null;
+ }
+
+ }
+
+ private PseudoFile generateShareURLShortcut(NodeRef nodeRef)
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("generateShareURLShortcut nodeRef" + nodeRef);
+ }
+ if ( context.isShareURLEnabled())
+ {
+ String site = getSiteForNode(nodeRef);
+ if(site != null)
+ {
+ // Make sure the state has the associated node details
+ // Build the URL file data
+
+ StringBuilder urlStr = new StringBuilder();
+
+// This is the URL generated
+// http://markr:8080/share/page/site/wibble/folder-details?nodeRef=workspace://SpacesStore/f72b2475-7571-46fe-947b-b0ee1b6a82ea
+ urlStr.append("[InternetShortcut]\r\n");
+ urlStr.append("URL=");
+ urlStr.append(getShareURLPrefix());
+ urlStr.append("page/site/");
+ urlStr.append(site + "/folder-details?nodeRef=");
+ urlStr.append(nodeRef.getStoreRef() + "/");
+ urlStr.append( nodeRef.getId());
+ urlStr.append("\r\n");
+
+ // Should this be the URL instead
+// http://markr:8080/share/page/site/wibble/documentlibrary#filter=path%7C%2Ffolder%2520A%2FFolderB&page=1
+
+ // Create the in memory pseudo file for the URL link
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("generateShareURLShortcut url as string:" + urlStr);
+ }
+ byte[] urlData = urlStr.toString().getBytes();
+
+ MemoryPseudoFile urlFile = new MemoryPseudoFile( context.getShareURLFileName(), urlData);
+ return urlFile;
+ }
+
+
+ }
+ return null;
+ }
+
+
+ /**
+ *
+ */
+ public boolean isPseudoFile(NodeRef parentDir, String name)
+ {
+ if ( parentDir == null)
+ {
+ return false;
+ }
+
+ if(context.isAlfrescoURLEnabled())
+ {
+ if(context.getURLFileName().equals(name))
+ {
+ return true;
+ }
+ }
+
+ if(context.isShareURLEnabled())
+ {
+ if(context.getShareURLFileName().equals(name))
+ {
+ return true;
+ }
+ }
+
+ if(getPseudoFile(parentDir, name) != null)
+ {
+ return true;
+ }
+ else
+ {
+ return false;
+ }
+ }
+
+ /**
+ * Get the pseudo file
+ * @param parentDir NodeRef
+ * @param fname String
+ * @return the pseudoFile or null if there is no pseudo file
+ */
+ public PseudoFile getPseudoFile(NodeRef parentDir, String fname)
+ {
+ if ( parentDir == null)
+ {
+ return null;
+ }
+ if (isDeleted(parentDir, fname))
+ {
+ return null;
+ }
+ if(context.isAlfrescoURLEnabled())
+ {
+ if(context.getURLFileName().equals(fname))
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("returning URL pseudo file");
+ }
+ return generateAlfrescoURLShortcut(parentDir);
+ }
+ }
+
+ if(context.isShareURLEnabled())
+ {
+ if(context.getShareURLFileName().equals(fname))
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("returning Share URL pseudo file");
+ }
+ return generateShareURLShortcut(parentDir);
+ }
+ }
+
+ PseudoFile file = pl.findFile(fname, false);
+ return file;
+ }
+
+ /**
+ *
+ */
+ public PseudoFileList searchPseudoFiles(NodeRef parentDir, String name)
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("searchPseudoFile parentDir:" + parentDir +", name:" + name);
+ }
+ //return pseudo files matching the path/pattern
+
+ if ( parentDir == null || name == null || name.length() == 0 || name.equals("\\"))
+ {
+ return null;
+ }
+
+ String fname = name;
+
+ if ( fname.equals( "*.*"))
+ {
+ fname = "*";
+ }
+
+ if ( WildCard.containsWildcards(fname))
+ {
+ // does contain wildcards
+
+ // Check if the wildcard is for all files or a subset
+
+ if ( fname.equals( "*"))
+ {
+ // Match all pseudo files
+ PseudoFileList filterList = new PseudoFileList();
+
+ // copy desktop actions which do not depend on parentDir
+ for ( int i = 0; i < pl.numberOfFiles(); i++)
+ {
+ PseudoFile pseudoFile = pl.getFileAt(i);
+ if(!isDeleted(parentDir, pseudoFile.getFileName()))
+ {
+ // File is not deleted
+ filterList.addFile(pseudoFile);
+ }
+ }
+
+ // The URL file is dependent upon the parent dir
+ if(context.isAlfrescoURLEnabled())
+ {
+ if(!isDeleted(parentDir, context.getURLFileName()))
+ {
+ filterList.addFile(generateAlfrescoURLShortcut(parentDir));
+ }
+ else
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("alfresco URL pseudo file deleted");
+ }
+ }
+ }
+
+ if(context.isShareURLEnabled())
+ {
+ if(!isDeleted(parentDir, context.getShareURLFileName()))
+ {
+ PseudoFile sharePseudoFile = generateShareURLShortcut(parentDir);
+ if(sharePseudoFile != null)
+ {
+ filterList.addFile(sharePseudoFile);
+ }
+ }
+ }
+
+ return filterList;
+ }
+ else
+ {
+ // Generate a subset of pseudo files that match the wildcard search pattern
+
+ WildCard wildCard = new WildCard( fname, false);
+ PseudoFileList filterList = new PseudoFileList();
+
+ for ( int i = 0; i < pl.numberOfFiles(); i++)
+ {
+ PseudoFile pseudoFile = pl.getFileAt( i);
+ if ( wildCard.matchesPattern( pseudoFile.getFileName()))
+ {
+ if(!isDeleted(parentDir, pseudoFile.getFileName()))
+ {
+ // Add the pseudo file to the filtered list
+ filterList.addFile( pseudoFile);
+ }
+ }
+ }
+
+ // The URL file is dependent upon the parent dir
+ if(context.isAlfrescoURLEnabled())
+ {
+ if(wildCard.matchesPattern(context.getURLFileName()))
+ {
+ if(!isDeleted(parentDir, context.getURLFileName()))
+ {
+ filterList.addFile(generateAlfrescoURLShortcut(parentDir));
+ }
+ }
+ }
+
+ if(context.isShareURLEnabled())
+ {
+ if(wildCard.matchesPattern(context.getShareURLFileName()))
+ {
+ if(!isDeleted(parentDir, context.getShareURLFileName()))
+ {
+ PseudoFile sharePseudoFile = generateShareURLShortcut(parentDir);
+
+ if(sharePseudoFile != null)
+ {
+ filterList.addFile(sharePseudoFile);
+ }
+ }
+ }
+ }
+
+ return filterList;
+ // Use the filtered pseudo file list, or null if there were no matches
+ }
+ }
+ else
+ {
+ // does not contain wild cards
+ PseudoFileList filterList = new PseudoFileList();
+ PseudoFile file = getPseudoFile(parentDir, fname);
+
+ if(file != null && !isDeleted(parentDir, fname))
+ {
+ filterList.addFile(file);
+ }
+
+ return filterList;
+ }
+ }
+
+ @Override
+ public void delete(NodeRef parentDir, String name)
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("delete pseudo file parentDir:" + parentDir + ", name: " + name);
+ }
+ getDeletePseudoFileCache().put(toDeleteKey(parentDir, name), "Deleted");
+ }
+
+ private String toDeleteKey(NodeRef parentNoderef, String name)
+ {
+ return (parentNoderef.toString() + "/" + name + ":" + context.getDeviceName()).toLowerCase();
+ }
+
+ private boolean isDeleted(NodeRef parentDir, String name)
+ {
+ String key = toDeleteKey(parentDir, name);
+ boolean isDeleted = getDeletePseudoFileCache().contains(key);
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("pseudoFile isDeleted: " + isDeleted + ", for name:" + name);
+ }
+ return isDeleted;
+ }
+
+ //
+ public void setNodeService(NodeService nodeService)
+ {
+ this.nodeService = nodeService;
+ }
+
+ public NodeService getNodeService()
+ {
+ return nodeService;
+ }
+
+ public void setContext(AlfrescoContext context)
+ {
+ this.context = context;
+ }
+
+ public AlfrescoContext getContext()
+ {
+ return context;
+ }
+
+ private final String getAlfrescoURLPrefix()
+ {
+ return sysAdminParams.getAlfrescoProtocol() + "://" + sysAdminParams.getAlfrescoHost() + ":" + sysAdminParams.getAlfrescoPort() + "/" + sysAdminParams.getAlfrescoContext() + "/";
+ }
+ private final String getShareURLPrefix()
+ {
+ return sysAdminParams.getShareProtocol() + "://" + sysAdminParams.getShareHost() + ":" + sysAdminParams.getSharePort() + "/" + sysAdminParams.getShareContext() + "/";
+ }
+
+ public void setSysAdminParams(SysAdminParams sysAdminParams)
+ {
+ this.sysAdminParams = sysAdminParams;
+ }
+
+ public SysAdminParams getSysAdminParams()
+ {
+ return sysAdminParams;
+ }
+
+ public SimpleCache getDeletePseudoFileCache()
+ {
+ return deletePseudoFileCache;
+ }
+
+ public void setDeletePseudoFileCache(SimpleCache deletePseudoFileCache) {
+ this.deletePseudoFileCache = deletePseudoFileCache;
+ }
+}
diff --git a/source/java/org/alfresco/filesys/alfresco/RepositoryDiskInterface.java b/source/java/org/alfresco/filesys/alfresco/RepositoryDiskInterface.java
index 3db0b67653..05eeebfe75 100644
--- a/source/java/org/alfresco/filesys/alfresco/RepositoryDiskInterface.java
+++ b/source/java/org/alfresco/filesys/alfresco/RepositoryDiskInterface.java
@@ -1,125 +1,125 @@
-package org.alfresco.filesys.alfresco;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
-import org.alfresco.filesys.repo.OpenFileMode;
-import org.alfresco.jlan.server.SrvSession;
-import org.alfresco.jlan.server.filesys.NetworkFile;
-import org.alfresco.jlan.server.filesys.TreeConnection;
-import org.alfresco.service.cmr.repository.NodeRef;
-
-/**
- * Extra methods for DiskInterface, primarily implemented to support CIFS shuffles.
- */
-public interface RepositoryDiskInterface
-{
- /**
- * Copy the content from one node to another.
- *
- * @param rootNode NodeRef
- * @param fromPath - the source node
- * @param toPath - the target node
- * @throws FileNotFoundException
- */
- public void copyContent(NodeRef rootNode, String fromPath, String toPath) throws FileNotFoundException;
-
-
- /**
- * CreateFile.
- *
- * @param rootNode NodeRef
- * @param Path - path
- * @param allocationSize size to allocate for new file
- * @param isHidden boolean
- * @throws FileNotFoundException
- */
- public NetworkFile createFile(NodeRef rootNode, String Path, long allocationSize, boolean isHidden) throws IOException;
-
- /**
- * RestoreFile.
- *
- * Either restores the file or creates a new one.
- *
- * @param sess SrvSession
- * @param tree TreeConnection
- * @param rootNode NodeRef
- * @param path - path
- * @param allocationSize size to allocate for new file
- * @param originalNodeRef NodeRef
- * @throws FileNotFoundException
- */
- public NetworkFile restoreFile(SrvSession sess,
- TreeConnection tree,
- NodeRef rootNode,
- String path,
- long allocationSize,
- NodeRef originalNodeRef) throws IOException;
-
-
- /**
- *
- * @param session // temp until refactor
- * @param tree // temp until refactor
- * @param rootNode NodeRef
- * @param path String
- * @param mode OpenFileMode
- * @param truncate boolean
- * @return NetworkFile
- */
- public NetworkFile openFile(SrvSession session, TreeConnection tree, NodeRef rootNode, String path, OpenFileMode mode, boolean truncate) throws IOException;
-
- /**
- * CloseFile.
- *
- * @param tree TreeConnection
- * @param rootNode NodeRef
- * @param Path - path
- * @param file - file
- * @throws FileNotFoundException
- * @return node ref of deleted file or null if no file deleted
- */
- public NodeRef closeFile(TreeConnection tree, NodeRef rootNode, String Path, NetworkFile file) throws IOException;
-
-
- /**
- * Delete file
- * @param session SrvSession
- * @param tree TreeConnection
- * @param rootNode NodeRef
- * @param path String
- * @return NodeRef of file deleted or null if no file deleted
- * @throws IOException
- */
- public NodeRef deleteFile2(final SrvSession session, final TreeConnection tree, NodeRef rootNode, String path) throws IOException;
-
- /**
- *
- * @param session SrvSession
- * @param tree TreeConnection
- * @param file NetworkFile
- */
- public void reduceQuota(SrvSession session, TreeConnection tree, NetworkFile file);
-
- /**
- *
- * @param rootNode NodeRef
- * @param path String
- */
- public void deleteEmptyFile(NodeRef rootNode, String path);
-
- /**
- * Rename the specified file.
- *
- * @param rootNode root node
- * @param oldName java.lang.String
- * @param newName java.lang.String
- * @param soft boolean
- * @param moveAsSystem move as system
- * @exception java.io.IOException The exception description.
- */
- public void renameFile(NodeRef rootNode, String oldName, String newName, boolean soft, boolean moveAsSystem)
- throws java.io.IOException;
-
-
-}
+package org.alfresco.filesys.alfresco;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import org.alfresco.filesys.repo.OpenFileMode;
+import org.alfresco.jlan.server.SrvSession;
+import org.alfresco.jlan.server.filesys.NetworkFile;
+import org.alfresco.jlan.server.filesys.TreeConnection;
+import org.alfresco.service.cmr.repository.NodeRef;
+
+/**
+ * Extra methods for DiskInterface, primarily implemented to support CIFS shuffles.
+ */
+public interface RepositoryDiskInterface
+{
+ /**
+ * Copy the content from one node to another.
+ *
+ * @param rootNode NodeRef
+ * @param fromPath - the source node
+ * @param toPath - the target node
+ * @throws FileNotFoundException
+ */
+ public void copyContent(NodeRef rootNode, String fromPath, String toPath) throws FileNotFoundException;
+
+
+ /**
+ * CreateFile.
+ *
+ * @param rootNode NodeRef
+ * @param Path - path
+ * @param allocationSize size to allocate for new file
+ * @param isHidden boolean
+ * @throws FileNotFoundException
+ */
+ public NetworkFile createFile(NodeRef rootNode, String Path, long allocationSize, boolean isHidden) throws IOException;
+
+ /**
+ * RestoreFile.
+ *
+ * Either restores the file or creates a new one.
+ *
+ * @param sess SrvSession
+ * @param tree TreeConnection
+ * @param rootNode NodeRef
+ * @param path - path
+ * @param allocationSize size to allocate for new file
+ * @param originalNodeRef NodeRef
+ * @throws FileNotFoundException
+ */
+ public NetworkFile restoreFile(SrvSession sess,
+ TreeConnection tree,
+ NodeRef rootNode,
+ String path,
+ long allocationSize,
+ NodeRef originalNodeRef) throws IOException;
+
+
+ /**
+ *
+ * @param session // temp until refactor
+ * @param tree // temp until refactor
+ * @param rootNode NodeRef
+ * @param path String
+ * @param mode OpenFileMode
+ * @param truncate boolean
+ * @return NetworkFile
+ */
+ public NetworkFile openFile(SrvSession session, TreeConnection tree, NodeRef rootNode, String path, OpenFileMode mode, boolean truncate) throws IOException;
+
+ /**
+ * CloseFile.
+ *
+ * @param tree TreeConnection
+ * @param rootNode NodeRef
+ * @param Path - path
+ * @param file - file
+ * @throws FileNotFoundException
+ * @return node ref of deleted file or null if no file deleted
+ */
+ public NodeRef closeFile(TreeConnection tree, NodeRef rootNode, String Path, NetworkFile file) throws IOException;
+
+
+ /**
+ * Delete file
+ * @param session SrvSession
+ * @param tree TreeConnection
+ * @param rootNode NodeRef
+ * @param path String
+ * @return NodeRef of file deleted or null if no file deleted
+ * @throws IOException
+ */
+ public NodeRef deleteFile2(final SrvSession session, final TreeConnection tree, NodeRef rootNode, String path) throws IOException;
+
+ /**
+ *
+ * @param session SrvSession
+ * @param tree TreeConnection
+ * @param file NetworkFile
+ */
+ public void reduceQuota(SrvSession session, TreeConnection tree, NetworkFile file);
+
+ /**
+ *
+ * @param rootNode NodeRef
+ * @param path String
+ */
+ public void deleteEmptyFile(NodeRef rootNode, String path);
+
+ /**
+ * Rename the specified file.
+ *
+ * @param rootNode root node
+ * @param oldName java.lang.String
+ * @param newName java.lang.String
+ * @param soft boolean
+ * @param moveAsSystem move as system
+ * @exception java.io.IOException The exception description.
+ */
+ public void renameFile(NodeRef rootNode, String oldName, String newName, boolean soft, boolean moveAsSystem)
+ throws java.io.IOException;
+
+
+}
diff --git a/source/java/org/alfresco/filesys/alfresco/ShuffleCache.java b/source/java/org/alfresco/filesys/alfresco/ShuffleCache.java
index 682db064ce..760e02d2ee 100644
--- a/source/java/org/alfresco/filesys/alfresco/ShuffleCache.java
+++ b/source/java/org/alfresco/filesys/alfresco/ShuffleCache.java
@@ -1,59 +1,59 @@
-package org.alfresco.filesys.alfresco;
-
-/**
- * Cache for alfresco "save shuffles" which are used by some applications
- * to compensate for a most computer filesystem being non atomic.
- *
- *
- * Overlays an Alfresco repository with temporary files being created and
- * soft deleted from folders that are likely to have save shuffles going on.
- *
- * Implementations must be thread safe
- */
-public interface ShuffleCache
-{
-
- /**
- * Add a new temporary file to the "shuffle cache". Content is not persisted
- * in the alfresco repo until either a rename occurs or after a time delay.
- */
- public void createTemporaryFile(String path);
-
- /**
- * Soft delete a file. The file may be re-instated later or the delete made
- * permenant after a time delay.
- */
- public void softDelete(String path);
-
- /**
- * Takes the contents of a temporary file and applies it to the new path.
- *
- * If the new path has been soft deleted then the soft delete is removed.
- *
- * After the contents of the temporary file have been written the it may may be made
- * available for garbage collection.
- *
- * @param oldPath the location of the temporaryFile
- * @param newPath the location of the new file.
- */
- public void renameTemporaryFile(String oldPath, String newPath);
-
- /**
- * Does the specified directory contain a shuffled temporary file
- * @param dir String
- * @return boolean
- */
- boolean isShuffleDirectory(String dir);
-
- /**
- * Has the path been "soft deleted"
- */
- boolean isDeleted(String path);
-
- /**
- * Has the path been "soft created"
- * @param path String
- * @return boolean
- */
- boolean isCreated(String path);
-}
+package org.alfresco.filesys.alfresco;
+
+/**
+ * Cache for alfresco "save shuffles" which are used by some applications
+ * to compensate for a most computer filesystem being non atomic.
+ *
+ *
+ * Overlays an Alfresco repository with temporary files being created and
+ * soft deleted from folders that are likely to have save shuffles going on.
+ *
+ * Implementations must be thread safe
+ */
+public interface ShuffleCache
+{
+
+ /**
+ * Add a new temporary file to the "shuffle cache". Content is not persisted
+ * in the alfresco repo until either a rename occurs or after a time delay.
+ */
+ public void createTemporaryFile(String path);
+
+ /**
+ * Soft delete a file. The file may be re-instated later or the delete made
+ * permenant after a time delay.
+ */
+ public void softDelete(String path);
+
+ /**
+ * Takes the contents of a temporary file and applies it to the new path.
+ *
+ * If the new path has been soft deleted then the soft delete is removed.
+ *
+ * After the contents of the temporary file have been written the it may may be made
+ * available for garbage collection.
+ *
+ * @param oldPath the location of the temporaryFile
+ * @param newPath the location of the new file.
+ */
+ public void renameTemporaryFile(String oldPath, String newPath);
+
+ /**
+ * Does the specified directory contain a shuffled temporary file
+ * @param dir String
+ * @return boolean
+ */
+ boolean isShuffleDirectory(String dir);
+
+ /**
+ * Has the path been "soft deleted"
+ */
+ boolean isDeleted(String path);
+
+ /**
+ * Has the path been "soft created"
+ * @param path String
+ * @return boolean
+ */
+ boolean isCreated(String path);
+}
diff --git a/source/java/org/alfresco/filesys/alfresco/ShuffleCacheImpl.java b/source/java/org/alfresco/filesys/alfresco/ShuffleCacheImpl.java
index 6b1a20c320..3c4e208496 100644
--- a/source/java/org/alfresco/filesys/alfresco/ShuffleCacheImpl.java
+++ b/source/java/org/alfresco/filesys/alfresco/ShuffleCacheImpl.java
@@ -1,101 +1,101 @@
-package org.alfresco.filesys.alfresco;
-
-import java.util.Map;
-
-/**
- * Cache for alfresco "CIFS shuffles"
- *
- *
- */
-public class ShuffleCacheImpl implements ShuffleCache
-{
- /**
- * time in ms that temporary files should live in the cache before
- * being persisted.
- */
- private long timeBeforePersist = 5 * 60000L; // 5 minutes default
-
- /**
- * Is the cache caseSensitive?
- */
- private boolean caseSensitive;
-
- /**
- * The shuffle folder cache keyed by path.
- *
- */
- private Map folderCache;
-
-
- /**
- * The information held for each folder that has a "shuffle"
- * in progress.
- * @author mrogers
- */
- private class ShuffleFolderInfo
- {
-
- }
-
- @Override
- public void createTemporaryFile(String path)
- {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void softDelete(String path)
- {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void renameTemporaryFile(String oldPath, String newPath)
- {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public boolean isShuffleDirectory(String dir)
- {
- // TODO Auto-generated method stub
- return false;
- }
-
- @Override
- public boolean isDeleted(String path)
- {
- // TODO Auto-generated method stub
- return false;
- }
-
- @Override
- public boolean isCreated(String path)
- {
- // TODO Auto-generated method stub
- return false;
- }
-
- void setTimeBeforePersist(long timeBeforePersist)
- {
- this.timeBeforePersist = timeBeforePersist;
- }
-
- long getTimeBeforePersist()
- {
- return timeBeforePersist;
- }
-
- public void setCaseSensitive(boolean caseSensitive)
- {
- this.caseSensitive = caseSensitive;
- }
-
- public boolean isCaseSensitive()
- {
- return caseSensitive;
- }
-}
+package org.alfresco.filesys.alfresco;
+
+import java.util.Map;
+
+/**
+ * Cache for alfresco "CIFS shuffles"
+ *
+ *
+ */
+public class ShuffleCacheImpl implements ShuffleCache
+{
+ /**
+ * time in ms that temporary files should live in the cache before
+ * being persisted.
+ */
+ private long timeBeforePersist = 5 * 60000L; // 5 minutes default
+
+ /**
+ * Is the cache caseSensitive?
+ */
+ private boolean caseSensitive;
+
+ /**
+ * The shuffle folder cache keyed by path.
+ *
+ */
+ private Map folderCache;
+
+
+ /**
+ * The information held for each folder that has a "shuffle"
+ * in progress.
+ * @author mrogers
+ */
+ private class ShuffleFolderInfo
+ {
+
+ }
+
+ @Override
+ public void createTemporaryFile(String path)
+ {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void softDelete(String path)
+ {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void renameTemporaryFile(String oldPath, String newPath)
+ {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public boolean isShuffleDirectory(String dir)
+ {
+ // TODO Auto-generated method stub
+ return false;
+ }
+
+ @Override
+ public boolean isDeleted(String path)
+ {
+ // TODO Auto-generated method stub
+ return false;
+ }
+
+ @Override
+ public boolean isCreated(String path)
+ {
+ // TODO Auto-generated method stub
+ return false;
+ }
+
+ void setTimeBeforePersist(long timeBeforePersist)
+ {
+ this.timeBeforePersist = timeBeforePersist;
+ }
+
+ long getTimeBeforePersist()
+ {
+ return timeBeforePersist;
+ }
+
+ public void setCaseSensitive(boolean caseSensitive)
+ {
+ this.caseSensitive = caseSensitive;
+ }
+
+ public boolean isCaseSensitive()
+ {
+ return caseSensitive;
+ }
+}
diff --git a/source/java/org/alfresco/filesys/alfresco/package-info.java b/source/java/org/alfresco/filesys/alfresco/package-info.java
index 52a9b7b3c1..8129527030 100644
--- a/source/java/org/alfresco/filesys/alfresco/package-info.java
+++ b/source/java/org/alfresco/filesys/alfresco/package-info.java
@@ -1,15 +1,15 @@
-/**
- * FileSystem
- *
- * DesktopAction
- *
- * AlfrescoDiskDriver
- *
- * MultiTenantShareMapper
- *
- *
- */
-@PackageMarker
-package org.alfresco.filesys.alfresco;
-import org.alfresco.util.PackageMarker;
-
+/**
+ * FileSystem
+ *
+ * DesktopAction
+ *
+ * AlfrescoDiskDriver
+ *
+ * MultiTenantShareMapper
+ *
+ *
+ */
+@PackageMarker
+package org.alfresco.filesys.alfresco;
+import org.alfresco.util.PackageMarker;
+
diff --git a/source/java/org/alfresco/filesys/auth/cifs/package-info.java b/source/java/org/alfresco/filesys/auth/cifs/package-info.java
index e73873bd5f..65aef5a8ea 100644
--- a/source/java/org/alfresco/filesys/auth/cifs/package-info.java
+++ b/source/java/org/alfresco/filesys/auth/cifs/package-info.java
@@ -1,14 +1,14 @@
-/**
- * Provides authentication implementations for CIFS
- *
- * AlfrescoCifsAuthenticator
- * EnterpriseCifsAuthenticator deals with Kerberos, NTLMv1 and NTLMv2
- * PassthruCifsAuthenticator deals with authenticating against an external system
- *
- *
- * CifsAuthenticatorBase abstract base class.
- */
-@PackageMarker
-package org.alfresco.filesys.auth.cifs;
-import org.alfresco.util.PackageMarker;
-
+/**
+ * Provides authentication implementations for CIFS
+ *
+ * AlfrescoCifsAuthenticator
+ * EnterpriseCifsAuthenticator deals with Kerberos, NTLMv1 and NTLMv2
+ * PassthruCifsAuthenticator deals with authenticating against an external system
+ *
+ *
+ * CifsAuthenticatorBase abstract base class.
+ */
+@PackageMarker
+package org.alfresco.filesys.auth.cifs;
+import org.alfresco.util.PackageMarker;
+
diff --git a/source/java/org/alfresco/filesys/auth/ftp/package-info.java b/source/java/org/alfresco/filesys/auth/ftp/package-info.java
index 58588652b3..13af20ff00 100644
--- a/source/java/org/alfresco/filesys/auth/ftp/package-info.java
+++ b/source/java/org/alfresco/filesys/auth/ftp/package-info.java
@@ -1,5 +1,5 @@
-/**
- */
-@PackageMarker
-package org.alfresco.filesys.auth.ftp;
-import org.alfresco.util.PackageMarker;
+/**
+ */
+@PackageMarker
+package org.alfresco.filesys.auth.ftp;
+import org.alfresco.util.PackageMarker;
diff --git a/source/java/org/alfresco/filesys/auth/nfs/package-info.java b/source/java/org/alfresco/filesys/auth/nfs/package-info.java
index 069f711037..36114895ba 100644
--- a/source/java/org/alfresco/filesys/auth/nfs/package-info.java
+++ b/source/java/org/alfresco/filesys/auth/nfs/package-info.java
@@ -1,5 +1,5 @@
-/**
- */
-@PackageMarker
-package org.alfresco.filesys.auth.nfs;
-import org.alfresco.util.PackageMarker;
+/**
+ */
+@PackageMarker
+package org.alfresco.filesys.auth.nfs;
+import org.alfresco.util.PackageMarker;
diff --git a/source/java/org/alfresco/filesys/auth/package-info.java b/source/java/org/alfresco/filesys/auth/package-info.java
index 4bb679248a..0342d787cb 100644
--- a/source/java/org/alfresco/filesys/auth/package-info.java
+++ b/source/java/org/alfresco/filesys/auth/package-info.java
@@ -1,5 +1,5 @@
-/**
- */
-@PackageMarker
-package org.alfresco.filesys.auth;
-import org.alfresco.util.PackageMarker;
+/**
+ */
+@PackageMarker
+package org.alfresco.filesys.auth;
+import org.alfresco.util.PackageMarker;
diff --git a/source/java/org/alfresco/filesys/config/ClusterConfigBean.java b/source/java/org/alfresco/filesys/config/ClusterConfigBean.java
index b5855e093c..395989e492 100644
--- a/source/java/org/alfresco/filesys/config/ClusterConfigBean.java
+++ b/source/java/org/alfresco/filesys/config/ClusterConfigBean.java
@@ -1,45 +1,45 @@
-package org.alfresco.filesys.config;
-
-/**
- * The Class ClusterConfigBean.
- *
- * @author mrogers
- * @since 4.0
- */
-public class ClusterConfigBean
-{
- private String debugFlags;
- private int nearCacheTimeout;
-
- public boolean getClusterEnabled()
- {
- // No clustering support in community edition.
- return false;
- }
-
- public String getClusterName()
- {
- // No clustering support in community edition.
- return null;
- }
-
- public void setDebugFlags(String debugFlags)
- {
- this.debugFlags = debugFlags;
- }
-
- public String getDebugFlags()
- {
- return debugFlags;
- }
-
- public void setNearCacheTimeout(int nearCacheTimeout)
- {
- this.nearCacheTimeout = nearCacheTimeout;
- }
-
- public int getNearCacheTimeout()
- {
- return nearCacheTimeout;
- }
-}
+package org.alfresco.filesys.config;
+
+/**
+ * The Class ClusterConfigBean.
+ *
+ * @author mrogers
+ * @since 4.0
+ */
+public class ClusterConfigBean
+{
+ private String debugFlags;
+ private int nearCacheTimeout;
+
+ public boolean getClusterEnabled()
+ {
+ // No clustering support in community edition.
+ return false;
+ }
+
+ public String getClusterName()
+ {
+ // No clustering support in community edition.
+ return null;
+ }
+
+ public void setDebugFlags(String debugFlags)
+ {
+ this.debugFlags = debugFlags;
+ }
+
+ public String getDebugFlags()
+ {
+ return debugFlags;
+ }
+
+ public void setNearCacheTimeout(int nearCacheTimeout)
+ {
+ this.nearCacheTimeout = nearCacheTimeout;
+ }
+
+ public int getNearCacheTimeout()
+ {
+ return nearCacheTimeout;
+ }
+}
diff --git a/source/java/org/alfresco/filesys/config/acl/package-info.java b/source/java/org/alfresco/filesys/config/acl/package-info.java
index 1b217aeb0b..2dc9da8ca5 100644
--- a/source/java/org/alfresco/filesys/config/acl/package-info.java
+++ b/source/java/org/alfresco/filesys/config/acl/package-info.java
@@ -1,5 +1,5 @@
-/**
- */
-@PackageMarker
-package org.alfresco.filesys.config.acl;
-import org.alfresco.util.PackageMarker;
+/**
+ */
+@PackageMarker
+package org.alfresco.filesys.config.acl;
+import org.alfresco.util.PackageMarker;
diff --git a/source/java/org/alfresco/filesys/config/package-info.java b/source/java/org/alfresco/filesys/config/package-info.java
index 79209a0e0c..9f811753b5 100644
--- a/source/java/org/alfresco/filesys/config/package-info.java
+++ b/source/java/org/alfresco/filesys/config/package-info.java
@@ -1,5 +1,5 @@
-/**
- */
-@PackageMarker
-package org.alfresco.filesys.config;
-import org.alfresco.util.PackageMarker;
+/**
+ */
+@PackageMarker
+package org.alfresco.filesys.config;
+import org.alfresco.util.PackageMarker;
diff --git a/source/java/org/alfresco/filesys/debug/package-info.java b/source/java/org/alfresco/filesys/debug/package-info.java
index acd114817f..0e4de07f10 100644
--- a/source/java/org/alfresco/filesys/debug/package-info.java
+++ b/source/java/org/alfresco/filesys/debug/package-info.java
@@ -1,5 +1,5 @@
-/**
- */
-@PackageMarker
-package org.alfresco.filesys.debug;
-import org.alfresco.util.PackageMarker;
+/**
+ */
+@PackageMarker
+package org.alfresco.filesys.debug;
+import org.alfresco.util.PackageMarker;
diff --git a/source/java/org/alfresco/filesys/package-info.java b/source/java/org/alfresco/filesys/package-info.java
index ef434ad620..427681d4b5 100644
--- a/source/java/org/alfresco/filesys/package-info.java
+++ b/source/java/org/alfresco/filesys/package-info.java
@@ -1,6 +1,6 @@
-/**
- * The Alfresco file system interface implementation
- */
-@PackageMarker
-package org.alfresco.filesys;
-import org.alfresco.util.PackageMarker;
+/**
+ * The Alfresco file system interface implementation
+ */
+@PackageMarker
+package org.alfresco.filesys;
+import org.alfresco.util.PackageMarker;
diff --git a/source/java/org/alfresco/filesys/repo/AlfrescoFolder.java b/source/java/org/alfresco/filesys/repo/AlfrescoFolder.java
index 8fac5b0fa8..2225151c78 100644
--- a/source/java/org/alfresco/filesys/repo/AlfrescoFolder.java
+++ b/source/java/org/alfresco/filesys/repo/AlfrescoFolder.java
@@ -1,93 +1,93 @@
-package org.alfresco.filesys.repo;
-
-import java.io.IOException;
-
-import org.alfresco.error.AlfrescoRuntimeException;
-import org.alfresco.jlan.server.filesys.FileInfo;
-import org.alfresco.jlan.server.filesys.NetworkFile;
-import org.alfresco.jlan.server.filesys.cache.FileState;
-import org.alfresco.jlan.server.filesys.cache.NetworkFileStateInterface;
-
-/**
- * Object returned to JLAN if the repository object is a folder.
- */
-public class AlfrescoFolder extends NetworkFile implements NetworkFileStateInterface
-{
- public AlfrescoFolder(String path, FileInfo fileInfo, boolean readOnly)
- {
- super(path);
- setFullName(path);
-
- // Set the file timestamps
-
- if ( fileInfo.hasCreationDateTime())
- setCreationDate( fileInfo.getCreationDateTime());
-
- if ( fileInfo.hasModifyDateTime())
- setModifyDate(fileInfo.getModifyDateTime());
-
- if ( fileInfo.hasAccessDateTime())
- setAccessDate(fileInfo.getAccessDateTime());
-
- // Set the file attributes
- setAttributes(fileInfo.getFileAttributes());
- }
-
- @Override
- public void openFile(boolean createFlag) throws IOException
- {
- throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
- }
-
- @Override
- public int readFile(byte[] buf, int len, int pos, long fileOff)
- throws IOException
- {
- throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
- }
-
- @Override
- public void writeFile(byte[] buf, int len, int pos, long fileOff)
- throws IOException
- {
- throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
- }
-
- @Override
- public long seekFile(long pos, int typ) throws IOException
- {
- return 0;
- }
-
- @Override
- public void flushFile() throws IOException
- {
- // Do nothing.
- }
-
- @Override
- public void truncateFile(long siz) throws IOException
- {
- throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
- }
-
- @Override
- public void closeFile() throws IOException
- {
- setClosed(true);
- }
-
- // For JLAN file state lock manager
- public void setFileState(FileState fileState)
- {
- this.fileState = fileState;
- }
-
- @Override
- public FileState getFileState()
- {
- return fileState;
-
- }
- private FileState fileState;
-}
+package org.alfresco.filesys.repo;
+
+import java.io.IOException;
+
+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.jlan.server.filesys.FileInfo;
+import org.alfresco.jlan.server.filesys.NetworkFile;
+import org.alfresco.jlan.server.filesys.cache.FileState;
+import org.alfresco.jlan.server.filesys.cache.NetworkFileStateInterface;
+
+/**
+ * Object returned to JLAN if the repository object is a folder.
+ */
+public class AlfrescoFolder extends NetworkFile implements NetworkFileStateInterface
+{
+ public AlfrescoFolder(String path, FileInfo fileInfo, boolean readOnly)
+ {
+ super(path);
+ setFullName(path);
+
+ // Set the file timestamps
+
+ if ( fileInfo.hasCreationDateTime())
+ setCreationDate( fileInfo.getCreationDateTime());
+
+ if ( fileInfo.hasModifyDateTime())
+ setModifyDate(fileInfo.getModifyDateTime());
+
+ if ( fileInfo.hasAccessDateTime())
+ setAccessDate(fileInfo.getAccessDateTime());
+
+ // Set the file attributes
+ setAttributes(fileInfo.getFileAttributes());
+ }
+
+ @Override
+ public void openFile(boolean createFlag) throws IOException
+ {
+ throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
+ }
+
+ @Override
+ public int readFile(byte[] buf, int len, int pos, long fileOff)
+ throws IOException
+ {
+ throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
+ }
+
+ @Override
+ public void writeFile(byte[] buf, int len, int pos, long fileOff)
+ throws IOException
+ {
+ throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
+ }
+
+ @Override
+ public long seekFile(long pos, int typ) throws IOException
+ {
+ return 0;
+ }
+
+ @Override
+ public void flushFile() throws IOException
+ {
+ // Do nothing.
+ }
+
+ @Override
+ public void truncateFile(long siz) throws IOException
+ {
+ throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
+ }
+
+ @Override
+ public void closeFile() throws IOException
+ {
+ setClosed(true);
+ }
+
+ // For JLAN file state lock manager
+ public void setFileState(FileState fileState)
+ {
+ this.fileState = fileState;
+ }
+
+ @Override
+ public FileState getFileState()
+ {
+ return fileState;
+
+ }
+ private FileState fileState;
+}
diff --git a/source/java/org/alfresco/filesys/repo/BufferedContentDiskDriver.java b/source/java/org/alfresco/filesys/repo/BufferedContentDiskDriver.java
index eb4732d825..cd216b110a 100644
--- a/source/java/org/alfresco/filesys/repo/BufferedContentDiskDriver.java
+++ b/source/java/org/alfresco/filesys/repo/BufferedContentDiskDriver.java
@@ -1,650 +1,650 @@
-package org.alfresco.filesys.repo;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.Date;
-
-import org.alfresco.filesys.config.ServerConfigurationBean;
-import org.alfresco.filesys.alfresco.ExtendedDiskInterface;
-import org.alfresco.jlan.server.SrvSession;
-import org.alfresco.jlan.server.core.DeviceContext;
-import org.alfresco.jlan.server.core.DeviceContextException;
-import org.alfresco.jlan.server.core.SharedDevice;
-import org.alfresco.jlan.server.filesys.DiskDeviceContext;
-import org.alfresco.jlan.server.filesys.DiskInterface;
-import org.alfresco.jlan.server.filesys.DiskSizeInterface;
-import org.alfresco.jlan.server.filesys.FileAccessToken;
-import org.alfresco.jlan.server.filesys.FileInfo;
-import org.alfresco.jlan.server.filesys.FileOpenParams;
-import org.alfresco.jlan.server.filesys.FileStatus;
-import org.alfresco.jlan.server.filesys.IOControlNotImplementedException;
-import org.alfresco.jlan.server.filesys.IOCtlInterface;
-import org.alfresco.jlan.server.filesys.NetworkFile;
-import org.alfresco.jlan.server.filesys.SearchContext;
-import org.alfresco.jlan.server.filesys.SrvDiskInfo;
-import org.alfresco.jlan.server.filesys.TreeConnection;
-import org.alfresco.jlan.server.filesys.cache.FileState;
-import org.alfresco.jlan.server.filesys.cache.FileStateCache;
-import org.alfresco.jlan.server.locking.FileLockingInterface;
-import org.alfresco.jlan.server.locking.LockManager;
-import org.alfresco.jlan.server.locking.OpLockInterface;
-import org.alfresco.jlan.server.locking.OpLockManager;
-import org.alfresco.jlan.smb.SMBException;
-import org.alfresco.jlan.util.DataBuffer;
-import org.alfresco.repo.cache.SimpleCache;
-import org.alfresco.repo.node.NodeServicePolicies;
-import org.alfresco.repo.policy.JavaBehaviour;
-import org.alfresco.repo.policy.PolicyComponent;
-import org.alfresco.repo.security.authentication.AuthenticationUtil;
-import org.alfresco.service.cmr.repository.ChildAssociationRef;
-import org.alfresco.util.PropertyCheck;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.springframework.extensions.config.ConfigElement;
-
-/**
- * Alfresco Content Disk Driver Cache
- *
- * Decorates ContentDiskDriver with a performance cache of some frequently used
- * results. In particular for getFileInformation and fileExists
- */
-/*
- * MER - this class is also acting as a proxy to gather together the different interfaces
- * and present them to JLAN. This was not the intention and is a short term hack. It
- * should be possible to un-spring the buffering, however that's not possible at the moment.
- */
-public class BufferedContentDiskDriver implements ExtendedDiskInterface,
- DiskInterface,
- DiskSizeInterface,
- IOCtlInterface,
- OpLockInterface,
- FileLockingInterface,
- NodeServicePolicies.OnDeleteNodePolicy,
- NodeServicePolicies.OnMoveNodePolicy
-{
- // Logging
- private static final Log logger = LogFactory.getLog(BufferedContentDiskDriver.class);
-
- private ExtendedDiskInterface diskInterface;
-
- private DiskSizeInterface diskSizeInterface;
-
- private IOCtlInterface ioctlInterface;
-
- private OpLockInterface opLockInterface;
-
- private FileLockingInterface fileLockingInterface;
-
- private PolicyComponent policyComponent;
-
- public void init()
- {
- PropertyCheck.mandatory(this, "diskInterface", diskInterface);
- PropertyCheck.mandatory(this, "diskSizeInterface", diskSizeInterface);
- PropertyCheck.mandatory(this, "ioctltInterface", ioctlInterface);
- PropertyCheck.mandatory(this, "fileInfoCache", fileInfoCache);
- PropertyCheck.mandatory(this, "fileLockingInterface", getFileLockingInterface());
- PropertyCheck.mandatory(this, "opLockInterface", getOpLockInterface());
- PropertyCheck.mandatory(this, "fileLockingInterface", fileLockingInterface);
- PropertyCheck.mandatory(this, "policyComponent", getPolicyComponent());
-
- getPolicyComponent().bindClassBehaviour( NodeServicePolicies.OnDeleteNodePolicy.QNAME,
- this, new JavaBehaviour(this, "onDeleteNode"));
- getPolicyComponent().bindClassBehaviour( NodeServicePolicies.OnMoveNodePolicy.QNAME,
- this, new JavaBehaviour(this, "onMoveNode"));
- }
-
- /**
- * FileInfo Cache for path to FileInfo
- */
- private SimpleCache fileInfoCache;
-
- /**
- * Set the cache that maintains node ID-NodeRef cross referencing data
- *
- * @param cache the cache
- */
- public void setFileInfoCache(SimpleCache cache)
- {
- this.fileInfoCache = cache;
- }
-
- private static class FileInfoKey implements Serializable
- {
- /**
- *
- */
- private static final long serialVersionUID = 1L;
-
- String deviceName;
- String path;
- String user;
- int hashCode;
-
- public FileInfoKey(SrvSession sess, String path, TreeConnection tree)
- {
- this.path = path;
- this.user = sess.getUniqueId();
- this.deviceName = tree.getSharedDevice().getName();
-
-// if(deviceName == null)
-// {
-// throw new RuntimeException("device name is null");
-// }
-// if(path == null)
-// {
-// throw new RuntimeException("path is null");
-// }
-// if(user == null)
-// {
-// throw new RuntimeException("unique id is null");
-// }
- }
-
- @Override
- public boolean equals(Object other)
- {
- if (this == other)
- {
- return true;
- }
- if (other == null || !(other instanceof FileInfoKey))
- {
- return false;
- }
-
- FileInfoKey o = (FileInfoKey)other;
-
- return path.equals(o.path) && user.equals(o.user) && deviceName.equals(o.deviceName);
- }
-
- @Override
- public int hashCode()
- {
- if(hashCode == 0)
- {
- hashCode = (user+path+deviceName).hashCode();
- }
- return hashCode;
- }
- }
-
- private FileInfo getFileInformationInternal(SrvSession sess, TreeConnection tree,
- String path) throws IOException
- {
-
- //String userName = AuthenticationUtil.getFullyAuthenticatedUser();
- SharedDevice device = tree.getSharedDevice();
- String deviceName = device.getName();
-
- if(logger.isDebugEnabled())
- {
- logger.debug("getFileInformation session:" + sess.getUniqueId() + ", deviceName:" + deviceName + ", path:" + path);
- }
-
- if(path == null)
- {
- throw new IllegalArgumentException("Path is null");
- }
-
- FileInfoKey key = new FileInfoKey(sess, path, tree);
-
- FileInfo fromCache = fileInfoCache.get(key);
-
- if(fromCache != null)
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("returning FileInfo from cache");
- }
- return fromCache;
- }
-
- FileInfo info = diskInterface.getFileInformation(sess, tree, path);
-
- if(info != null)
- {
- /**
- * Don't cache directories since the modification date is important.
- */
- if(!info.isDirectory())
- {
- fileInfoCache.put(key, info);
- }
- }
-
- /*
- * Dual Key the cache so it can be looked up by NodeRef or Path
- */
- if(info instanceof ContentFileInfo)
- {
- ContentFileInfo cinfo = (ContentFileInfo)info;
- fileInfoCache.put(cinfo.getNodeRef(), info);
- }
-
- return info;
- }
-
-
- @Override
- public FileInfo getFileInformation(SrvSession sess, TreeConnection tree,
- String path) throws IOException
- {
- ContentContext tctx = (ContentContext) tree.getContext();
-
- FileInfo info = getFileInformationInternal(sess, tree, path);
-
- /*
- * Some information is not maintained by the repo and represents an in-progress update.
- * For example as a file is being written the modification and access dates change.
- */
- if(tctx.hasStateCache())
- {
- FileStateCache cache = tctx.getStateCache();
- FileState fstate = cache.findFileState(path, false);
- if(fstate != null)
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("state cache available - overwriting from state cache: isDirectory=" +info.isDirectory());
- }
- FileInfo finfo = new FileInfo();
- finfo.copyFrom(info);
-
- /**
- * File state is probably stale for directories which is why we don't attempt to
- * cache.
- */
- if(!info.isDirectory())
- {
- /*
- * What about stale file state values here?
- */
- if(fstate.hasFileSize())
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("replace file size " + info.getSize() + " with " + fstate.getFileSize());
- }
- finfo.setFileSize(fstate.getFileSize());
- }
- if ( fstate.hasAccessDateTime())
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("replace access date " + new Date(finfo.getAccessDateTime()) + " with " + new Date(fstate.getAccessDateTime()));
- }
- finfo.setAccessDateTime(fstate.getAccessDateTime());
- }
- if ( fstate.hasChangeDateTime())
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("replace change date " + new Date(finfo.getChangeDateTime()) + " with " + new Date(fstate.getChangeDateTime()));
- }
- finfo.setChangeDateTime(fstate.getChangeDateTime());
- }
- if ( fstate.hasModifyDateTime())
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("replace modified date " + new Date(finfo.getModifyDateTime()) + " with " + new Date(fstate.getModifyDateTime()));
- }
- finfo.setModifyDateTime(fstate.getModifyDateTime());
- }
- if ( fstate.hasAllocationSize())
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("replace allocation size" + finfo.getAllocationSize() + " with " + fstate.getAllocationSize());
- }
- finfo.setAllocationSize(fstate.getAllocationSize());
- }
- }
-
- if(logger.isDebugEnabled())
- {
- logger.debug("Return getFileInformation, path: " + path +
- ", returning:" + finfo +
- ", readOnly:" +finfo.isReadOnly() +
- ", fileId:" +finfo.getFileId() +
- ", fileSize:" +finfo.getSize() +
- ", directoryId:" + finfo.getDirectoryId() +
- ", createdDate: " + new Date(finfo.getCreationDateTime()) +
- ", accessDate:" + new Date(finfo.getAccessDateTime()) +
- ", modifiedDate:" + new Date(finfo.getModifyDateTime()) +
- ", changeDate:" + new Date(finfo.getChangeDateTime()) +
- ", fileAttributes: 0x"+ Integer.toHexString(info.getFileAttributes()) +
- ", mode: 0x" + Integer.toHexString(finfo.getMode()));
- }
-
- return finfo;
- }
- }
-
- if(logger.isDebugEnabled())
- {
- logger.debug("getFileInformation Return:" + path + " returning" + info);
- }
-
- return info;
-
- }
-
- @Override
- public int fileExists(SrvSession sess, TreeConnection tree, String path)
- {
- String deviceName = tree.getSharedDevice().getName();
-
- if(logger.isDebugEnabled())
- {
- logger.debug("fileExists session:" + sess.getUniqueId() + ", deviceName" + deviceName + ", path:" + path);
- }
-
- FileInfoKey key = new FileInfoKey(sess, path, tree);
-
- FileInfo fromCache = fileInfoCache.get(key);
-
- if(fromCache != null)
- {
- if(logger.isDebugEnabled())
- {
- logger.debug("fileExists found FileInfo in cache");
- }
- if (fromCache.isDirectory())
- {
- return FileStatus.DirectoryExists;
- }
- else
- {
- return FileStatus.FileExists;
- }
- }
- else
- {
- try
- {
- FileInfo lookup = getFileInformationInternal(sess, tree, path);
-
- if(logger.isDebugEnabled())
- {
- logger.debug("fileExists obtained file information");
- }
- if (lookup.isDirectory())
- {
- return FileStatus.DirectoryExists;
- }
- else
- {
- return FileStatus.FileExists;
- }
- }
- catch (IOException ie)
- {
- return FileStatus.NotExist;
- }
- }
- }
-
- @Override
- public DeviceContext createContext(String shareName, ConfigElement args)
- throws DeviceContextException
- {
- return diskInterface.createContext(shareName, args);
- }
-
- @Override
- public void treeOpened(SrvSession sess, TreeConnection tree)
- {
- diskInterface.treeOpened(sess, tree);
- }
-
- @Override
- public void treeClosed(SrvSession sess, TreeConnection tree)
- {
- diskInterface.treeClosed(sess, tree);
- }
-
- @Override
- public DataBuffer processIOControl(SrvSession sess, TreeConnection tree,
- int ctrlCode, int fid, DataBuffer dataBuf, boolean isFSCtrl,
- int filter) throws IOControlNotImplementedException, SMBException
- {
- return ioctlInterface.processIOControl(sess, tree, ctrlCode, fid, dataBuf, isFSCtrl, filter);
- }
-
- @Override
- public void getDiskInformation(DiskDeviceContext ctx, SrvDiskInfo diskDev)
- throws IOException
- {
- diskSizeInterface.getDiskInformation(ctx, diskDev);
- }
-
- @Override
- public void closeFile(SrvSession sess, TreeConnection tree,
- NetworkFile param) throws IOException
- {
- diskInterface.closeFile(sess, tree, param);
-
- /**
- * If the fileInfo cache may have just had some content updated.
- */
- if(!param.isDirectory() && !param.isReadOnly())
- {
- fileInfoCache.clear();
- }
- }
-
- @Override
- public void createDirectory(SrvSession sess, TreeConnection tree,
- FileOpenParams params) throws IOException
- {
- diskInterface.createDirectory(sess, tree, params);
- }
-
- @Override
- public NetworkFile createFile(SrvSession sess, TreeConnection tree,
- FileOpenParams params) throws IOException
- {
- return diskInterface.createFile(sess, tree, params);
- }
-
- @Override
- public void deleteDirectory(SrvSession sess, TreeConnection tree, String dir)
- throws IOException
- {
- fileInfoCache.remove(dir);
-
- diskInterface.deleteDirectory(sess, tree, dir);
- }
-
- @Override
- public void deleteFile(SrvSession sess, TreeConnection tree, String name)
- throws IOException
- {
- fileInfoCache.remove(name);
-
- diskInterface.deleteFile(sess, tree, name);
- }
-
- @Override
- public void flushFile(SrvSession sess, TreeConnection tree, NetworkFile file)
- throws IOException
- {
- diskInterface.flushFile(sess, tree, file);
- }
-
- @Override
- public boolean isReadOnly(SrvSession sess, DeviceContext ctx)
- throws IOException
- {
- return diskInterface.isReadOnly(sess, ctx);
- }
-
- @Override
- public NetworkFile openFile(SrvSession sess, TreeConnection tree,
- FileOpenParams params) throws IOException
- {
- return diskInterface.openFile(sess, tree, params);
- }
-
- @Override
- public int readFile(SrvSession sess, TreeConnection tree, NetworkFile file,
- byte[] buf, int bufPos, int siz, long filePos) throws IOException
- {
- return diskInterface.readFile(sess, tree, file, buf, bufPos, siz, filePos);
- }
-
- @Override
- public void renameFile(SrvSession sess, TreeConnection tree,
- String oldName, String newName) throws IOException
- {
- diskInterface.renameFile(sess, tree, oldName, newName);
- }
-
- @Override
- public long seekFile(SrvSession sess, TreeConnection tree,
- NetworkFile file, long pos, int typ) throws IOException
- {
- return diskInterface.seekFile(sess, tree, file, pos, typ);
- }
-
- @Override
- public void setFileInformation(SrvSession sess, TreeConnection tree,
- String name, FileInfo info) throws IOException
- {
- diskInterface.setFileInformation(sess, tree, name, info);
- }
-
- @Override
- public SearchContext startSearch(SrvSession sess, TreeConnection tree,
- String searchPath, int attrib) throws FileNotFoundException
- {
- return diskInterface.startSearch(sess, tree, searchPath, attrib);
- }
-
- @Override
- public void truncateFile(SrvSession sess, TreeConnection tree,
- NetworkFile file, long siz) throws IOException
- {
- diskInterface.truncateFile(sess, tree, file, siz);
- }
-
- @Override
- public int writeFile(SrvSession sess, TreeConnection tree,
- NetworkFile file, byte[] buf, int bufoff, int siz, long fileoff)
- throws IOException
- {
- return diskInterface.writeFile(sess, tree, file, buf, bufoff, siz, fileoff);
- }
-
- @Override
- public void registerContext(DeviceContext ctx)
- throws DeviceContextException
- {
- diskInterface.registerContext(ctx);
- }
-
- public void setDiskInterface(ExtendedDiskInterface diskInterface)
- {
- this.diskInterface = diskInterface;
- }
-
- public ExtendedDiskInterface getDiskInterface()
- {
- return diskInterface;
- }
-
- public void setDiskSizeInterface(DiskSizeInterface diskSizeInterface)
- {
- this.diskSizeInterface = diskSizeInterface;
- }
-
- public DiskSizeInterface getDiskSizeInterface()
- {
- return diskSizeInterface;
- }
-
- public void setIoctlInterface(IOCtlInterface iocltlInterface)
- {
- this.ioctlInterface = iocltlInterface;
- }
-
- public IOCtlInterface getIoctlInterface()
- {
- return ioctlInterface;
- }
-
- @Override
- public void onMoveNode(ChildAssociationRef oldChildAssocRef,
- ChildAssociationRef newChildAssocRef)
- {
- if(fileInfoCache.contains(oldChildAssocRef.getChildRef()))
- {
- logger.debug("cached node moved - clear the cache");
- fileInfoCache.clear();
- }
- }
-
- @Override
- public void onDeleteNode(ChildAssociationRef oldChildAssocRef, boolean isArchived)
- {
- if(fileInfoCache.contains(oldChildAssocRef.getChildRef()))
- {
- logger.debug("cached node deleted - clear the cache");
- fileInfoCache.clear();
- }
- }
-
- public void setPolicyComponent(PolicyComponent policyComponent)
- {
- this.policyComponent = policyComponent;
- }
-
- public PolicyComponent getPolicyComponent()
- {
- return policyComponent;
- }
-
- public void setOpLockInterface(OpLockInterface opLockInterface)
- {
- this.opLockInterface = opLockInterface;
- }
-
- public OpLockInterface getOpLockInterface()
- {
- return opLockInterface;
- }
-
- @Override
- public OpLockManager getOpLockManager(SrvSession sess, TreeConnection tree)
- {
- return opLockInterface.getOpLockManager(sess, tree);
- }
-
- @Override
- public boolean isOpLocksEnabled(SrvSession sess, TreeConnection tree)
- {
- return opLockInterface.isOpLocksEnabled(sess, tree);
- }
-
- @Override
- public LockManager getLockManager(SrvSession sess, TreeConnection tree)
- {
- return getFileLockingInterface().getLockManager(sess, tree);
- }
-
-
- public void setFileLockingInterface(FileLockingInterface fileLockingInterface)
- {
- this.fileLockingInterface = fileLockingInterface;
- }
-
-
- public FileLockingInterface getFileLockingInterface()
- {
- return fileLockingInterface;
- }
-}
+package org.alfresco.filesys.repo;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Date;
+
+import org.alfresco.filesys.config.ServerConfigurationBean;
+import org.alfresco.filesys.alfresco.ExtendedDiskInterface;
+import org.alfresco.jlan.server.SrvSession;
+import org.alfresco.jlan.server.core.DeviceContext;
+import org.alfresco.jlan.server.core.DeviceContextException;
+import org.alfresco.jlan.server.core.SharedDevice;
+import org.alfresco.jlan.server.filesys.DiskDeviceContext;
+import org.alfresco.jlan.server.filesys.DiskInterface;
+import org.alfresco.jlan.server.filesys.DiskSizeInterface;
+import org.alfresco.jlan.server.filesys.FileAccessToken;
+import org.alfresco.jlan.server.filesys.FileInfo;
+import org.alfresco.jlan.server.filesys.FileOpenParams;
+import org.alfresco.jlan.server.filesys.FileStatus;
+import org.alfresco.jlan.server.filesys.IOControlNotImplementedException;
+import org.alfresco.jlan.server.filesys.IOCtlInterface;
+import org.alfresco.jlan.server.filesys.NetworkFile;
+import org.alfresco.jlan.server.filesys.SearchContext;
+import org.alfresco.jlan.server.filesys.SrvDiskInfo;
+import org.alfresco.jlan.server.filesys.TreeConnection;
+import org.alfresco.jlan.server.filesys.cache.FileState;
+import org.alfresco.jlan.server.filesys.cache.FileStateCache;
+import org.alfresco.jlan.server.locking.FileLockingInterface;
+import org.alfresco.jlan.server.locking.LockManager;
+import org.alfresco.jlan.server.locking.OpLockInterface;
+import org.alfresco.jlan.server.locking.OpLockManager;
+import org.alfresco.jlan.smb.SMBException;
+import org.alfresco.jlan.util.DataBuffer;
+import org.alfresco.repo.cache.SimpleCache;
+import org.alfresco.repo.node.NodeServicePolicies;
+import org.alfresco.repo.policy.JavaBehaviour;
+import org.alfresco.repo.policy.PolicyComponent;
+import org.alfresco.repo.security.authentication.AuthenticationUtil;
+import org.alfresco.service.cmr.repository.ChildAssociationRef;
+import org.alfresco.util.PropertyCheck;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.extensions.config.ConfigElement;
+
+/**
+ * Alfresco Content Disk Driver Cache
+ *
+ * Decorates ContentDiskDriver with a performance cache of some frequently used
+ * results. In particular for getFileInformation and fileExists
+ */
+/*
+ * MER - this class is also acting as a proxy to gather together the different interfaces
+ * and present them to JLAN. This was not the intention and is a short term hack. It
+ * should be possible to un-spring the buffering, however that's not possible at the moment.
+ */
+public class BufferedContentDiskDriver implements ExtendedDiskInterface,
+ DiskInterface,
+ DiskSizeInterface,
+ IOCtlInterface,
+ OpLockInterface,
+ FileLockingInterface,
+ NodeServicePolicies.OnDeleteNodePolicy,
+ NodeServicePolicies.OnMoveNodePolicy
+{
+ // Logging
+ private static final Log logger = LogFactory.getLog(BufferedContentDiskDriver.class);
+
+ private ExtendedDiskInterface diskInterface;
+
+ private DiskSizeInterface diskSizeInterface;
+
+ private IOCtlInterface ioctlInterface;
+
+ private OpLockInterface opLockInterface;
+
+ private FileLockingInterface fileLockingInterface;
+
+ private PolicyComponent policyComponent;
+
+ public void init()
+ {
+ PropertyCheck.mandatory(this, "diskInterface", diskInterface);
+ PropertyCheck.mandatory(this, "diskSizeInterface", diskSizeInterface);
+ PropertyCheck.mandatory(this, "ioctltInterface", ioctlInterface);
+ PropertyCheck.mandatory(this, "fileInfoCache", fileInfoCache);
+ PropertyCheck.mandatory(this, "fileLockingInterface", getFileLockingInterface());
+ PropertyCheck.mandatory(this, "opLockInterface", getOpLockInterface());
+ PropertyCheck.mandatory(this, "fileLockingInterface", fileLockingInterface);
+ PropertyCheck.mandatory(this, "policyComponent", getPolicyComponent());
+
+ getPolicyComponent().bindClassBehaviour( NodeServicePolicies.OnDeleteNodePolicy.QNAME,
+ this, new JavaBehaviour(this, "onDeleteNode"));
+ getPolicyComponent().bindClassBehaviour( NodeServicePolicies.OnMoveNodePolicy.QNAME,
+ this, new JavaBehaviour(this, "onMoveNode"));
+ }
+
+ /**
+ * FileInfo Cache for path to FileInfo
+ */
+ private SimpleCache fileInfoCache;
+
+ /**
+ * Set the cache that maintains node ID-NodeRef cross referencing data
+ *
+ * @param cache the cache
+ */
+ public void setFileInfoCache(SimpleCache cache)
+ {
+ this.fileInfoCache = cache;
+ }
+
+ private static class FileInfoKey implements Serializable
+ {
+ /**
+ *
+ */
+ private static final long serialVersionUID = 1L;
+
+ String deviceName;
+ String path;
+ String user;
+ int hashCode;
+
+ public FileInfoKey(SrvSession sess, String path, TreeConnection tree)
+ {
+ this.path = path;
+ this.user = sess.getUniqueId();
+ this.deviceName = tree.getSharedDevice().getName();
+
+// if(deviceName == null)
+// {
+// throw new RuntimeException("device name is null");
+// }
+// if(path == null)
+// {
+// throw new RuntimeException("path is null");
+// }
+// if(user == null)
+// {
+// throw new RuntimeException("unique id is null");
+// }
+ }
+
+ @Override
+ public boolean equals(Object other)
+ {
+ if (this == other)
+ {
+ return true;
+ }
+ if (other == null || !(other instanceof FileInfoKey))
+ {
+ return false;
+ }
+
+ FileInfoKey o = (FileInfoKey)other;
+
+ return path.equals(o.path) && user.equals(o.user) && deviceName.equals(o.deviceName);
+ }
+
+ @Override
+ public int hashCode()
+ {
+ if(hashCode == 0)
+ {
+ hashCode = (user+path+deviceName).hashCode();
+ }
+ return hashCode;
+ }
+ }
+
+ private FileInfo getFileInformationInternal(SrvSession sess, TreeConnection tree,
+ String path) throws IOException
+ {
+
+ //String userName = AuthenticationUtil.getFullyAuthenticatedUser();
+ SharedDevice device = tree.getSharedDevice();
+ String deviceName = device.getName();
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("getFileInformation session:" + sess.getUniqueId() + ", deviceName:" + deviceName + ", path:" + path);
+ }
+
+ if(path == null)
+ {
+ throw new IllegalArgumentException("Path is null");
+ }
+
+ FileInfoKey key = new FileInfoKey(sess, path, tree);
+
+ FileInfo fromCache = fileInfoCache.get(key);
+
+ if(fromCache != null)
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("returning FileInfo from cache");
+ }
+ return fromCache;
+ }
+
+ FileInfo info = diskInterface.getFileInformation(sess, tree, path);
+
+ if(info != null)
+ {
+ /**
+ * Don't cache directories since the modification date is important.
+ */
+ if(!info.isDirectory())
+ {
+ fileInfoCache.put(key, info);
+ }
+ }
+
+ /*
+ * Dual Key the cache so it can be looked up by NodeRef or Path
+ */
+ if(info instanceof ContentFileInfo)
+ {
+ ContentFileInfo cinfo = (ContentFileInfo)info;
+ fileInfoCache.put(cinfo.getNodeRef(), info);
+ }
+
+ return info;
+ }
+
+
+ @Override
+ public FileInfo getFileInformation(SrvSession sess, TreeConnection tree,
+ String path) throws IOException
+ {
+ ContentContext tctx = (ContentContext) tree.getContext();
+
+ FileInfo info = getFileInformationInternal(sess, tree, path);
+
+ /*
+ * Some information is not maintained by the repo and represents an in-progress update.
+ * For example as a file is being written the modification and access dates change.
+ */
+ if(tctx.hasStateCache())
+ {
+ FileStateCache cache = tctx.getStateCache();
+ FileState fstate = cache.findFileState(path, false);
+ if(fstate != null)
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("state cache available - overwriting from state cache: isDirectory=" +info.isDirectory());
+ }
+ FileInfo finfo = new FileInfo();
+ finfo.copyFrom(info);
+
+ /**
+ * File state is probably stale for directories which is why we don't attempt to
+ * cache.
+ */
+ if(!info.isDirectory())
+ {
+ /*
+ * What about stale file state values here?
+ */
+ if(fstate.hasFileSize())
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("replace file size " + info.getSize() + " with " + fstate.getFileSize());
+ }
+ finfo.setFileSize(fstate.getFileSize());
+ }
+ if ( fstate.hasAccessDateTime())
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("replace access date " + new Date(finfo.getAccessDateTime()) + " with " + new Date(fstate.getAccessDateTime()));
+ }
+ finfo.setAccessDateTime(fstate.getAccessDateTime());
+ }
+ if ( fstate.hasChangeDateTime())
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("replace change date " + new Date(finfo.getChangeDateTime()) + " with " + new Date(fstate.getChangeDateTime()));
+ }
+ finfo.setChangeDateTime(fstate.getChangeDateTime());
+ }
+ if ( fstate.hasModifyDateTime())
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("replace modified date " + new Date(finfo.getModifyDateTime()) + " with " + new Date(fstate.getModifyDateTime()));
+ }
+ finfo.setModifyDateTime(fstate.getModifyDateTime());
+ }
+ if ( fstate.hasAllocationSize())
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("replace allocation size" + finfo.getAllocationSize() + " with " + fstate.getAllocationSize());
+ }
+ finfo.setAllocationSize(fstate.getAllocationSize());
+ }
+ }
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("Return getFileInformation, path: " + path +
+ ", returning:" + finfo +
+ ", readOnly:" +finfo.isReadOnly() +
+ ", fileId:" +finfo.getFileId() +
+ ", fileSize:" +finfo.getSize() +
+ ", directoryId:" + finfo.getDirectoryId() +
+ ", createdDate: " + new Date(finfo.getCreationDateTime()) +
+ ", accessDate:" + new Date(finfo.getAccessDateTime()) +
+ ", modifiedDate:" + new Date(finfo.getModifyDateTime()) +
+ ", changeDate:" + new Date(finfo.getChangeDateTime()) +
+ ", fileAttributes: 0x"+ Integer.toHexString(info.getFileAttributes()) +
+ ", mode: 0x" + Integer.toHexString(finfo.getMode()));
+ }
+
+ return finfo;
+ }
+ }
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("getFileInformation Return:" + path + " returning" + info);
+ }
+
+ return info;
+
+ }
+
+ @Override
+ public int fileExists(SrvSession sess, TreeConnection tree, String path)
+ {
+ String deviceName = tree.getSharedDevice().getName();
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("fileExists session:" + sess.getUniqueId() + ", deviceName" + deviceName + ", path:" + path);
+ }
+
+ FileInfoKey key = new FileInfoKey(sess, path, tree);
+
+ FileInfo fromCache = fileInfoCache.get(key);
+
+ if(fromCache != null)
+ {
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("fileExists found FileInfo in cache");
+ }
+ if (fromCache.isDirectory())
+ {
+ return FileStatus.DirectoryExists;
+ }
+ else
+ {
+ return FileStatus.FileExists;
+ }
+ }
+ else
+ {
+ try
+ {
+ FileInfo lookup = getFileInformationInternal(sess, tree, path);
+
+ if(logger.isDebugEnabled())
+ {
+ logger.debug("fileExists obtained file information");
+ }
+ if (lookup.isDirectory())
+ {
+ return FileStatus.DirectoryExists;
+ }
+ else
+ {
+ return FileStatus.FileExists;
+ }
+ }
+ catch (IOException ie)
+ {
+ return FileStatus.NotExist;
+ }
+ }
+ }
+
+ @Override
+ public DeviceContext createContext(String shareName, ConfigElement args)
+ throws DeviceContextException
+ {
+ return diskInterface.createContext(shareName, args);
+ }
+
+ @Override
+ public void treeOpened(SrvSession sess, TreeConnection tree)
+ {
+ diskInterface.treeOpened(sess, tree);
+ }
+
+ @Override
+ public void treeClosed(SrvSession sess, TreeConnection tree)
+ {
+ diskInterface.treeClosed(sess, tree);
+ }
+
+ @Override
+ public DataBuffer processIOControl(SrvSession sess, TreeConnection tree,
+ int ctrlCode, int fid, DataBuffer dataBuf, boolean isFSCtrl,
+ int filter) throws IOControlNotImplementedException, SMBException
+ {
+ return ioctlInterface.processIOControl(sess, tree, ctrlCode, fid, dataBuf, isFSCtrl, filter);
+ }
+
+ @Override
+ public void getDiskInformation(DiskDeviceContext ctx, SrvDiskInfo diskDev)
+ throws IOException
+ {
+ diskSizeInterface.getDiskInformation(ctx, diskDev);
+ }
+
+ @Override
+ public void closeFile(SrvSession sess, TreeConnection tree,
+ NetworkFile param) throws IOException
+ {
+ diskInterface.closeFile(sess, tree, param);
+
+ /**
+ * If the fileInfo cache may have just had some content updated.
+ */
+ if(!param.isDirectory() && !param.isReadOnly())
+ {
+ fileInfoCache.clear();
+ }
+ }
+
+ @Override
+ public void createDirectory(SrvSession sess, TreeConnection tree,
+ FileOpenParams params) throws IOException
+ {
+ diskInterface.createDirectory(sess, tree, params);
+ }
+
+ @Override
+ public NetworkFile createFile(SrvSession sess, TreeConnection tree,
+ FileOpenParams params) throws IOException
+ {
+ return diskInterface.createFile(sess, tree, params);
+ }
+
+ @Override
+ public void deleteDirectory(SrvSession sess, TreeConnection tree, String dir)
+ throws IOException
+ {
+ fileInfoCache.remove(dir);
+
+ diskInterface.deleteDirectory(sess, tree, dir);
+ }
+
+ @Override
+ public void deleteFile(SrvSession sess, TreeConnection tree, String name)
+ throws IOException
+ {
+ fileInfoCache.remove(name);
+
+ diskInterface.deleteFile(sess, tree, name);
+ }
+
+ @Override
+ public void flushFile(SrvSession sess, TreeConnection tree, NetworkFile file)
+ throws IOException
+ {
+ diskInterface.flushFile(sess, tree, file);
+ }
+
+ @Override
+ public boolean isReadOnly(SrvSession sess, DeviceContext ctx)
+ throws IOException
+ {
+ return diskInterface.isReadOnly(sess, ctx);
+ }
+
+ @Override
+ public NetworkFile openFile(SrvSession sess, TreeConnection tree,
+ FileOpenParams params) throws IOException
+ {
+ return diskInterface.openFile(sess, tree, params);
+ }
+
+ @Override
+ public int readFile(SrvSession sess, TreeConnection tree, NetworkFile file,
+ byte[] buf, int bufPos, int siz, long filePos) throws IOException
+ {
+ return diskInterface.readFile(sess, tree, file, buf, bufPos, siz, filePos);
+ }
+
+ @Override
+ public void renameFile(SrvSession sess, TreeConnection tree,
+ String oldName, String newName) throws IOException
+ {
+ diskInterface.renameFile(sess, tree, oldName, newName);
+ }
+
+ @Override
+ public long seekFile(SrvSession sess, TreeConnection tree,
+ NetworkFile file, long pos, int typ) throws IOException
+ {
+ return diskInterface.seekFile(sess, tree, file, pos, typ);
+ }
+
+ @Override
+ public void setFileInformation(SrvSession sess, TreeConnection tree,
+ String name, FileInfo info) throws IOException
+ {
+ diskInterface.setFileInformation(sess, tree, name, info);
+ }
+
+ @Override
+ public SearchContext startSearch(SrvSession sess, TreeConnection tree,
+ String searchPath, int attrib) throws FileNotFoundException
+ {
+ return diskInterface.startSearch(sess, tree, searchPath, attrib);
+ }
+
+ @Override
+ public void truncateFile(SrvSession sess, TreeConnection tree,
+ NetworkFile file, long siz) throws IOException
+ {
+ diskInterface.truncateFile(sess, tree, file, siz);
+ }
+
+ @Override
+ public int writeFile(SrvSession sess, TreeConnection tree,
+ NetworkFile file, byte[] buf, int bufoff, int siz, long fileoff)
+ throws IOException
+ {
+ return diskInterface.writeFile(sess, tree, file, buf, bufoff, siz, fileoff);
+ }
+
+ @Override
+ public void registerContext(DeviceContext ctx)
+ throws DeviceContextException
+ {
+ diskInterface.registerContext(ctx);
+ }
+
+ public void setDiskInterface(ExtendedDiskInterface diskInterface)
+ {
+ this.diskInterface = diskInterface;
+ }
+
+ public ExtendedDiskInterface getDiskInterface()
+ {
+ return diskInterface;
+ }
+
+ public void setDiskSizeInterface(DiskSizeInterface diskSizeInterface)
+ {
+ this.diskSizeInterface = diskSizeInterface;
+ }
+
+ public DiskSizeInterface getDiskSizeInterface()
+ {
+ return diskSizeInterface;
+ }
+
+ public void setIoctlInterface(IOCtlInterface iocltlInterface)
+ {
+ this.ioctlInterface = iocltlInterface;
+ }
+
+ public IOCtlInterface getIoctlInterface()
+ {
+ return ioctlInterface;
+ }
+
+ @Override
+ public void onMoveNode(ChildAssociationRef oldChildAssocRef,
+ ChildAssociationRef newChildAssocRef)
+ {
+ if(fileInfoCache.contains(oldChildAssocRef.getChildRef()))
+ {
+ logger.debug("cached node moved - clear the cache");
+ fileInfoCache.clear();
+ }
+ }
+
+ @Override
+ public void onDeleteNode(ChildAssociationRef oldChildAssocRef, boolean isArchived)
+ {
+ if(fileInfoCache.contains(oldChildAssocRef.getChildRef()))
+ {
+ logger.debug("cached node deleted - clear the cache");
+ fileInfoCache.clear();
+ }
+ }
+
+ public void setPolicyComponent(PolicyComponent policyComponent)
+ {
+ this.policyComponent = policyComponent;
+ }
+
+ public PolicyComponent getPolicyComponent()
+ {
+ return policyComponent;
+ }
+
+ public void setOpLockInterface(OpLockInterface opLockInterface)
+ {
+ this.opLockInterface = opLockInterface;
+ }
+
+ public OpLockInterface getOpLockInterface()
+ {
+ return opLockInterface;
+ }
+
+ @Override
+ public OpLockManager getOpLockManager(SrvSession sess, TreeConnection tree)
+ {
+ return opLockInterface.getOpLockManager(sess, tree);
+ }
+
+ @Override
+ public boolean isOpLocksEnabled(SrvSession sess, TreeConnection tree)
+ {
+ return opLockInterface.isOpLocksEnabled(sess, tree);
+ }
+
+ @Override
+ public LockManager getLockManager(SrvSession sess, TreeConnection tree)
+ {
+ return getFileLockingInterface().getLockManager(sess, tree);
+ }
+
+
+ public void setFileLockingInterface(FileLockingInterface fileLockingInterface)
+ {
+ this.fileLockingInterface = fileLockingInterface;
+ }
+
+
+ public FileLockingInterface getFileLockingInterface()
+ {
+ return fileLockingInterface;
+ }
+}
\ No newline at end of file
diff --git a/source/java/org/alfresco/filesys/repo/CIFSContentComparator.java b/source/java/org/alfresco/filesys/repo/CIFSContentComparator.java
index 3c4e55a700..74762111ff 100644
--- a/source/java/org/alfresco/filesys/repo/CIFSContentComparator.java
+++ b/source/java/org/alfresco/filesys/repo/CIFSContentComparator.java
@@ -1,498 +1,498 @@
-package org.alfresco.filesys.repo;
-
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-
-import org.alfresco.service.cmr.repository.ContentIOException;
-import org.alfresco.service.cmr.repository.ContentReader;
-import org.alfresco.util.EqualsHelper;
-import org.alfresco.util.TempFileProvider;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.poi.hslf.HSLFSlideShow;
-import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.DirectoryEntry;
-import org.apache.poi.poifs.filesystem.EntryUtils;
-import org.apache.poi.poifs.filesystem.FilteringDirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
-
-/**
- * Compares content for to see if content is equal.
- *
- * Most mimetypes can simply be binary compared but for some mimetypes
- * there may be trivial differences so a binary compare is not sufficient.
- *
+ * Most mimetypes can simply be binary compared but for some mimetypes
+ * there may be trivial differences so a binary compare is not sufficient.
+ *