Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-06-30 18:15:39 +00:00)
20678: DAO5 branch: Preparation for merge back to HEAD
20689: Merged DAO4 to DAO5
- Removed all 'dbscripts/create/3.x/SomeDialect' and replaced with 'dbscripts/create/SomeDialect'
  DB create scripts are taken from latest DAO4
- TODO: FixAuthoritiesCrcValuesPatch needs query implementation in PatchDAO
Merged DAO3 to DAO4
- Reapplied fixes for ALF-713 (race condition on Usages)
19350: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-2 to BRANCHES/DEV/V3.3-DAO-REFACTOR-3:
18939: SAIL-4 :2nd stage branch for DAO refactor off HEAD rev 18898
18948: Merged V3.3-DAO-REFACTOR to V3.3-DAO-REFACTOR-2
18202: Dev branch for DAO refactor
18252: SAIL-233: QName.hbm.xml
18295: Added missing CREATE TABLE statements for QName-related code
18324: SAIL-234: Node.hbm.xml: Node aspects initial integration
18355: Added 'setValue' method to manually update the cached value
18356: MV property stressing lowered to speed test up
18357: SAIL-234: Node.hbm.xml
18376: Pulled all Alfresco-related create SQL into script
18389: SAIL-234: Permissions DAO refactor - initial checkpoint
18390: Formatting only (line-endings)
18400: SAIL-234: Node.hbm.xml
18418: SAIL-234: Node.hbm.xml: 'alf_node_assoc' CRUD
18429: SAIL-234: Node.hbm.xml: Cleaned out all Hibernate references to NodeAssocImpl
18457: SAIL-234: Permissions DAO refactor
18959: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
18479: SAIL-234: Node.hbm.xml - fix updateNode (missing id when saving oldDummyNode)
18482: SAIL-235: remove Permissions.hbm.xml
18517: SAIL-235: Permissions DAO refactor
18523: SAIL-234: Node.hbm.xml
18524: SAIL-235: Permissions DAO refactor
18960: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
18533: Flipped back to Windows line endings
18535: Formatting-only (eol)
18540: Formatting-only (eol)
18541: SAIL-235: Permissions DAO refactor
18543: SAIL-234: Node.hbm.xml: Start alf_store changes
18567: SAIL-235: Permissions DAO refactor
18596: SAIL-305: Alfresco DDL - formatted/rationalized and added missing indexes & fk constraints
18603: SAIL-311: Minor cleanup for schema upgrade scripts (V3.3)
18604: SAIL-311: Remove empty dirs
18619: SAIL-274: Locale.hbm.xml
18621: Added method to create default ACL
18622: SAIL-234: Node.hbm.xml: Store, Transaction, Server and some node
18624: Formatting only (eol)
18631: SAIL-235: Permissions DAO refactor
18633: SAIL-235: Permissions DAO refactor - do not expose CRUD for AceContext (or AuthorityAlias) since currently unused
18639: getLocale(Locale) should return null if it doesn't exist
18640: SAIL-234 NodeDAO: More replacement of node queries and updates
18648: SAIL-310: Create SQL script for core repo tables (All DB ports)
18651: SAIL-234 NodeDAO: Moves across stores handle presence of target deleted nodes
18961: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
18658: SAIL-274 Locale DAO: Missing getValueKey() method
18662: SAIL-235: Permissions DAO refactor - further cleanup (of DbAccessControlList usage, including copyACLs)
18664: DB scripts porting for PostgreSQL finished.
18668: SAIL-234 Node DAO: Note in case Transaction Change ID is dropped from indexes
18669: SAIL-234 Node DAO: deleteNode and archive (store move) fixes
18672: DB scripts porting for Oracle finished.
18675: SAIL-235: Permissions DAO refactor
18677: DB scripts porting for DB2 finished.
18964: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
18687: Execute a callback with retries
18688: SAIL-234 Node DAO: Child association creation
18690: SAIL-234 Node DAO: Comment out raw creation of stores as it breaks subsequent bootstrap checks
18691: SAIL-234 Node DAO: More replacement of alf_child_assoc handling
18713: Commented about needing a more efficient removeChildAssociation method
18714: SAIL-234 Node DAO: Replaced queries on alf_child_assoc
18715: SAIL-234 Node DAO: More alf_child_assoc query replacement
18727: SAIL-234 Node DAO: alf_child_assoc queries complete
18737: SAIL-234 Node DAO: Tweaks to newNode and implemented prependPaths
18741: SAIL-234 and SAIL-334: Moved UsageDelta Hibernate code and queries over to UsageDeltaDAO
18748: SAIL-234 Node DAO: fix NPE (EditionServiceImplTest)
18769: SAIL-234 Node DAO: alf_node_properties ground work
18786: SAIL-234 Node DAO: alf_node_properties and cm:auditable properties
18810: Added EqualsHelper.getMapComparison
18813: TransactionalCache propagates cache clears and removals during rollback
18826: SAIL-234 Node DAO: Moved over sundry references to NodeDaoService to NodeDAO
18849: SAIL-237: UsageDelta.hbm.xml - eol formatting only (including removal of unwanted svn:eol-style=native property)
18869: SAIL-234 NodeDAO: Fixed more references to 'nodeDaoService'
18895: SAIL-234 NodeDAO: Queries for alf_transaction
18899: SAIL-234 Node DAO: Fixed bean fetching for 'nodeDAO'
18909: SAIL-234 NodeDAO: Fixes to getNodeRefStatus and various txn queries
18916: SAIL-234 NodeDAO: Fixed moveNode alf_child_assoc updates
18922: SAIL-235: DAO refactoring: Permission.hbm.xml
18930: SAIL-235: DAO refactoring: Permission.hbm.xml
18932: SAIL-234 NodeDAO: Fixing up gotchas, javadocs and some naming
18933: SAIL-234 NodeDAO: Minor neatening
18935: SAIL-234 Node DAO: Caches for ID to NodeRef and StoreRef
18936: EHCache config files line endings
18938: SAIL-237: Usage DAO refactor - initial checkpoint
18945: SAIL-235: DAO refactoring: Permission.hbm.xml. Move Node.
18975: Fix for move-node ACL jiggery-pokery
19067: SAIL-4: fix VersionHistoryImpl.getSuccessors (causing VersionServiceImplTest.testGetVersionHistorySameWorkspace failure)
19068: SAIL-234: fix VersionMigratorTest.testMigrateOneVersion
19074: SAIL-237: Usage DAO - update to common iBatis mapping pattern(s) to ease DB porting
19076: SAIL-231: Activities DAO - update to common iBatis mapping pattern(s)
19077: SAIL-232: AppliedPatch DAO - minor cleanup (comments & formatting only)
19092: Merging HEAD to DEV/V3.3-DAO-REFACTOR-2
18973: Temporarily comment out AVMTestSuite and run AVM tests individually
19056: AVM unit test improvements
19097: SAIL-235: DAO refactoring: Permission.hbm.xml: Additional index to support queries to find the id and acl id for the primary children of a node.
19185: SAIL-238: Permissions DAO - (minor) update to common iBatis mapping pattern
19289: SAIL-234 NodeDAO: Node cache replaces NodeRef cache
19302: SAIL-234 Node DAO: Added cache for node properties
19318: SAIL-4: AVM DAO - (minor) update to common iBatis mapping pattern
20690: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-4 to BRANCHES/DEV/V3.3-DAO-REFACTOR-5:
20063: (RECORD ONLY) DAO refactor branch V4
20146: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
19401: SAIL-234 Node DAO: Fix permission service tests (setPrimaryChildrenSharedAclId needs to invalidate nodesCache)
19428: Fixed TransactionalCache issue with null and NullValueMarker
19429: Took empty cm:content creation out of FileFolderService#createImpl
19430: SAIL-234 Node DAO: Tweaks around caching and cm:auditable
19431: SAIL-4 DAO Refactor: Exception thrown when attempting writes in read-only txn have changed
19436: SAIL-234 Node DAO: Fix NPE during cm:auditable update
19475: Allow debugging of code without stepping into trivial stuff
19476: Follow-up on 19429 by ensuring CIFS/FTP set a mimetype on the ContentWriter
19477: SAIL-234 Node DAO: Leverage DAO better for NodeService.addProperties
19478: SAIL-234 NodeDAO: Added toString() for ParentAssocsInfo (cache value for parent assocs)
19479: SAIL-234 Node DAO: Fixed for parent association and property caches
19480: Made TransactionAwareSingleton bind-key a GUID
19481: SAIL-234 Node DAO: Reinstated 100K collection property tests
19482: SAIL-234 Node DAO: Node and property cache fixes highlighted by unit tests
19483: SAIL-234 Node DAO: Start on NodeBulkLoader implementation
19595: SAIL-234 Node DAO: Fix moveNode to detect cyclic relationship prior to updating ACLs for moved tree FileFolderServiceImplTest.testETHREEOH_3088_MoveIntoSelf)
20147: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
19602: (RECORD ONLY) Reintegrated with HEAD up to rev 19433
19621: (RECORD ONLY) SAIL-347
19683: (RECORD ONLY) Reverse-merged 19621 for SAIL-347
19722: (RECORD ONLY) Merged /alfresco/HEAD:r19434-19721
20150: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
19741: Merged DEV\V3.3-DAO-REFACTOR-2 to DEV\V3.3-DAO-REFACTOR-3
19739: Extended "move" tests
19743: Fix AuditableAspectTest.testAddAspect (to allow for node modified date tolerance)
19748: Remaining part of merge from HEAD to V3.3-DAO-REFACTOR-3
19367: Merged BRANCHES/V3.2 to HEAD:
19286: Fix for ALF-626 "Using 'null' as an authority argument in clearPermissions() cause a java.lang.NullPointerException"
19755: SAIL-234 Node DAO: Fix RepoAdminServiceImplTest.testConcurrentDynamicModelDelete (handle InvalidNodeRefException after getChildAssocs)
20692: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-4 to BRANCHES/DEV/V3.3-DAO-REFACTOR-5:
- Retired all 1.3 and 1.4 upgrade scripts ... R.I.P.
- Fixed CRC patch for Authorities (only tested on MySQL)
- Fixed SQL patch revision numbers and bumped version schema number up
20158: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
19773: SQL mappings and scripts: SAIL-310, SAIL-304, SAIL-303 and SAIL-347
19774: Futher fix for SAIL-310: Sequence patch must take into account sequences created for 3.3
19851: SAIL-371 (SAIL-294) NodeDAO fallout: Fix QName and Namespace read/write handling and bean name in unit test
20183: Merged DAO3 to DAO4
19852: SAIL-370: Remove LinkValidation
19853: SAIL-239 (SAIL-294) Attributes.hbm.xml: Added ability to attach arbitrary property to unique context
19857: SAIL-373 Fallout from Permissions DAO refactor (SAIL-235)
19864: SAIL-239 (SAIL-294): Removed AttributeService RMI API
19865: More SAIL-239 (SAIL-294): Removed AttributeService RMI API
20208: DAO-refactor implementation of ALF-2712 query improvements
20209: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
20060: Removal of AttributeService for SAIL-239 (SAIL-294)
20348: SAIL-371 (SAIL-294): Protect collection properties during map insert and retrieval
20547: SAIL-371 (SAIL-294) Attributes.hbm.xml: implement getAttributes + fixes
20573: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests and other fallout
20597: SAIL-239 Attributes.hbm.xml: WCM/AVM locking test fixes (wip)
20598: SAIL-239 Attributes.hbm.xml: WCM/AVM locking test fixes (wip) - fix AssetServiceImplTest.testSimpleLockFile NPE
20600: Fix PropertyValueDAOTest.testPropertyValue_Enum (follow-on to r20060 for SAIL-239 - which introduces ENUM prop vals)
20601: Fix UsageDAOTest.testCreateAndDeleteUsageDeltas NPE (would also affect ContentStoreCleanerScalabilityRunner)
20603: Fix CMISPropertyServiceTest.* (fallout from r20146 <- r19429 <- Took empty cm:content creation out of FileFolderService#createImpl)
20604: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - TransferServiceImplTest.*
20618: SAIL-371 (SAIL-294): NodeDAO: AuditableAspectTest (fix testCreateNodeWithAuditableProperties_ALF_2565 + add remove aspect test)
20624: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - UserUsageTest.*
20626: Fixed random keys for RuleTrigger NodeRef tracking
20635: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - PersonTest.testSplitDuplicates
20642: SAIL-371 (SAIL-294) DAO: Fixed CacheTest
20643: Removed must of the 'distribute' target's dependencies. Not for HEAD
20645: Follow-on to r20643 (Removed most of the 'distribute' target's dependencies. Not for HEAD)
20654: SAIL-371 (SAIL-294): NodeDAO: DMDeploymentTargetTest.* (do not try to remove mandatory aspects)
20655: SAIL-371 (SAIL-294): NodeDAO: Initial fix for TaggingServiceImplTest.testTagScopeUpdateViaNodePolicies (+ minor test cleanup)
20657: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - VersionMigratorTest.testMigrateOneVersion (cm:accessed not returned if null)
20658: Merged (back merge only - no merge info) BRANCHES/V3.3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
20090: Dynamic models: minor improvements to DictionaryModelType
20554: Improvement to model delete validation (investigating intermittent failure of RepoAdminServiceImplTest.testSimpleDynamicModelViaNodeService)
20662: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - RecordsManagementAuditServiceImplTest.* (we now ignore attempt to update 'cm:modifier' prop so update 'cm:title' prop instead)
20666: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - ADMLuceneTest.*
20668: SAIL-239 (SAIL-294) - delete WCM locks + tests (follow-on to r20060)
20674: SAIL-371 (SAIL-294) NodeDAO fallout: Cleaner and additional checks for ContentStoreCleaner
20675: SAIL-371 (SAIL-294) NodeDAO fallout: Fixed handling of ContentData

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@20693 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
809 lines
25 KiB
Java
/*
 * Copyright (C) 2005-2010 Alfresco Software Limited.
 *
 * This file is part of Alfresco
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 */
package org.alfresco.filesys.repo;

import java.io.BufferedInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;

import org.alfresco.error.AlfrescoRuntimeException;
import org.springframework.extensions.surf.util.I18NUtil;
import org.alfresco.jlan.server.SrvSession;
import org.alfresco.jlan.server.filesys.AccessDeniedException;
import org.alfresco.jlan.server.filesys.DiskFullException;
import org.alfresco.jlan.server.filesys.FileAttribute;
import org.alfresco.jlan.server.filesys.FileInfo;
import org.alfresco.jlan.server.filesys.FileOpenParams;
import org.alfresco.jlan.server.filesys.NetworkFile;
import org.alfresco.jlan.smb.SeekType;
import org.alfresco.jlan.smb.server.SMBSrvSession;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.AbstractContentReader;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.encoding.ContentCharsetFinder;
import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.TransactionListenerAdapter;
import org.alfresco.service.cmr.repository.ContentAccessor;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.usage.ContentQuotaException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Implementation of the <tt>NetworkFile</tt> for direct interaction
 * with the channel repository.
 * <p>
 * This provides the interaction with the Alfresco Content Model file/folder structure.
 *
 * @author Derek Hulley
 */
public class ContentNetworkFile extends NodeRefNetworkFile
{
    private static final Log logger = LogFactory.getLog(ContentNetworkFile.class);

    // Services

    private NodeService nodeService;
    private ContentService contentService;
    private MimetypeService mimetypeService;

    // File channel to file content

    private FileChannel channel;

    // File content

    private ContentAccessor content;
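    // Content URL captured when the file is opened for writing; used on close to detect whether the binary actually changed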
    private String preUpdateContentURL;

    // Indicate if file has been written to or truncated/resized

    private boolean modified;

    // Flag to indicate if the file channel is writable

    private boolean writableChannel;

    /**
     * Helper method to create a {@link NetworkFile network file} given a node reference.
     */
    public static ContentNetworkFile createFile( NodeService nodeService, ContentService contentService, MimetypeService mimetypeService,
            CifsHelper cifsHelper, NodeRef nodeRef, FileOpenParams params, SrvSession sess)
    {
        String path = params.getPath();

        // Create the file

        ContentNetworkFile netFile = null;

        if ( isMSOfficeSpecialFile(path, sess, nodeService, nodeRef)) {

            // Create a file for special processing

            netFile = new MSOfficeContentNetworkFile( nodeService, contentService, mimetypeService, nodeRef, path);
        }
        else if ( isOpenOfficeSpecialFile( path, sess, nodeService, nodeRef)) {

            // Create a file for special processing

            netFile = new OpenOfficeContentNetworkFile( nodeService, contentService, mimetypeService, nodeRef, path);
        }
        else {

            // Create a normal content file

            netFile = new ContentNetworkFile(nodeService, contentService, mimetypeService, nodeRef, path);
        }

        // Set relevant parameters

        if (params.isReadOnlyAccess())
        {
            netFile.setGrantedAccess(NetworkFile.READONLY);
        }
        else
        {
            netFile.setGrantedAccess(NetworkFile.READWRITE);
        }

        // Check the type

        FileInfo fileInfo;
        try
        {
            fileInfo = cifsHelper.getFileInformation(nodeRef, "");
        }
        catch (FileNotFoundException e)
        {
            throw new AlfrescoRuntimeException("File not found when creating network file: " + nodeRef, e);
        }

        if (fileInfo.isDirectory())
        {
            netFile.setAttributes(FileAttribute.Directory);
        }
        else
        {
            // Set the current size

            netFile.setFileSize(fileInfo.getSize());
        }

        // Set the file timestamps

        if ( fileInfo.hasCreationDateTime())
            netFile.setCreationDate( fileInfo.getCreationDateTime());

        if ( fileInfo.hasModifyDateTime())
            netFile.setModifyDate(fileInfo.getModifyDateTime());

        if ( fileInfo.hasAccessDateTime())
            netFile.setAccessDate(fileInfo.getAccessDateTime());

        // Set the file attributes

        netFile.setAttributes(fileInfo.getFileAttributes());

        // Set the owner process id

        netFile.setProcessId( params.getProcessId());

        // If the file is read-only then only allow read access

        if ( netFile.isReadOnly())
            netFile.setGrantedAccess(NetworkFile.READONLY);

        // DEBUG

        if (logger.isDebugEnabled())
            logger.debug("Create file node=" + nodeRef + ", param=" + params + ", netfile=" + netFile);

        // Return the network file

        return netFile;
    }

    /**
     * Class constructor
     *
     * @param nodeService NodeService
     * @param contentService ContentService
     * @param mimetypeService MimetypeService
     * @param nodeRef NodeRef
     * @param name String
     */
    protected ContentNetworkFile(
            NodeService nodeService,
            ContentService contentService,
            MimetypeService mimetypeService,
            NodeRef nodeRef,
            String name)
    {
        super(name, nodeRef);
        setFullName(name);
        this.nodeService = nodeService;
        this.contentService = contentService;
        this.mimetypeService = mimetypeService;
    }

    /**
     * Return the file details as a string
     *
     * @return String
     */
    public String toString()
    {
        StringBuilder str = new StringBuilder();

        str.append( "[");
        str.append(getFullName());
        str.append(",");
        str.append( getNodeRef().getId());
        str.append( ",channel=");
        str.append( channel);
        if ( channel != null)
            str.append( writableChannel ? "(Write)" : "(Read)");
        if ( modified)
            str.append( ",modified");
        str.append( "]");

        return str.toString();
    }

    /**
     * @return Returns true if the channel should be writable
     *
     * @see NetworkFile#getGrantedAccess()
     * @see NetworkFile#READONLY
     * @see NetworkFile#WRITEONLY
     * @see NetworkFile#READWRITE
     */
    private boolean isWritable()
    {
        // Check that we are allowed to write

        int access = getGrantedAccess();
        return (access == NetworkFile.READWRITE || access == NetworkFile.WRITEONLY);
    }

    /**
     * Determine if the file content data has been opened
     *
     * @return boolean
     */
    public final boolean hasContent()
    {
        return content != null;
    }

    /**
     * Opens the channel for reading or writing depending on the access mode.
     * <p>
     * If the channel is already open, it is left as-is.
     *
     * @param write true if the channel must be writable
     * @param trunc true if the writable channel does not require the previous content data
     * @throws AccessDeniedException if this network file is read only
     * @throws AlfrescoRuntimeException if this network file represents a directory
     *
     * @see NetworkFile#getGrantedAccess()
     * @see NetworkFile#READONLY
     * @see NetworkFile#WRITEONLY
     * @see NetworkFile#READWRITE
     */
    protected void openContent(boolean write, boolean trunc)
        throws AccessDeniedException, AlfrescoRuntimeException
    {
        // Check if the file is a directory

        if (isDirectory())
        {
            throw new AlfrescoRuntimeException("Unable to open channel for a directory network file: " + this);
        }

        // Check if write access is required and the current channel is read-only

        else if ( write && writableChannel == false && channel != null)
        {
            // Close the existing read-only channel

            try
            {
                channel.close();
                channel = null;
            }
            catch (IOException ex)
            {
                logger.error("Error closing read-only channel", ex);
            }

            // Debug

            if ( logger.isDebugEnabled())
                logger.debug("Switching to writable channel for " + getName());
        }
        else if (channel != null)
        {
            // Already have channel open

            return;
        }

        // We need to create the channel

        if (write && !isWritable())
        {
            throw new AccessDeniedException("The network file was created for read-only: " + this);
        }
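        // Reset the current content accessor and pre-update URL before acquiring a new reader or writer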
        content = null;
        preUpdateContentURL = null;
        if (write)
        {
            // Get a writeable channel to the content, along with the original content

            content = contentService.getWriter( getNodeRef(), ContentModel.PROP_CONTENT, false);

            // Keep the original content for later comparison

            ContentData preUpdateContentData = (ContentData) nodeService.getProperty( getNodeRef(), ContentModel.PROP_CONTENT);
            if (preUpdateContentData != null)
            {
                preUpdateContentURL = preUpdateContentData.getContentUrl();
            }

            // Indicate that we have a writable channel to the file

            writableChannel = true;

            // Get the writable channel, do not copy existing content data if the file is to be truncated

            channel = ((ContentWriter) content).getFileChannel( trunc);
        }
        else
        {
            // Get a read-only channel to the content

            content = contentService.getReader( getNodeRef(), ContentModel.PROP_CONTENT);

            // Ensure that the content we are going to read is valid

            content = FileContentReader.getSafeContentReader(
                    (ContentReader) content,
                    I18NUtil.getMessage(FileContentReader.MSG_MISSING_CONTENT),
                    getNodeRef(), content);

            // Indicate that we only have a read-only channel to the data

            writableChannel = false;

            // Get the read-only channel

            channel = ((ContentReader) content).getFileChannel();
        }

        // Update the current file size

        if ( channel != null) {
            try {
                setFileSize(channel.size());
            }
            catch (IOException ex) {
                logger.error( ex);
            }
        }
    }

    /**
     * Close the file
     *
     * @exception IOException
     */
    public void closeFile()
        throws IOException
    {
        // Check if this is a directory

        if (isDirectory())
        {
            // Nothing to do

            return;
        }
        else if (!hasContent())
        {
            // File was not read/written so channel was not opened

            return;
        }

        // Check if the file has been modified

        if (modified)
        {
            NodeRef contentNodeRef = getNodeRef();
            // We may be in a retry block, in which case this section will already have executed and channel will be null
            if (channel != null)
            {
                // Take a guess at the mimetype (if it has not been set by something already)
                if (content.getMimetype() == null || content.getMimetype().equals(MimetypeMap.MIMETYPE_BINARY) )
                {
                    String filename = (String) nodeService.getProperty(contentNodeRef, ContentModel.PROP_NAME);
                    String mimetype = mimetypeService.guessMimetype(filename);
                    content.setMimetype(mimetype);
                }
                // Take a guess at the locale
                channel.position(0);
                InputStream is = new BufferedInputStream(Channels.newInputStream(channel));
                ContentCharsetFinder charsetFinder = mimetypeService.getContentCharsetFinder();
                Charset charset = charsetFinder.getCharset(is, content.getMimetype());
                content.setEncoding(charset.name());

                // Close the channel

                channel.close();
                channel = null;
            }

            // Retrieve the content data and stop the content URL from being 'eagerly deleted', in case we need to
            // retry the transaction

            final ContentData contentData = content.getContentData();

            // Update node properties, but only if the binary has changed (ETHREEOH-1861)

            ContentReader postUpdateContentReader = ((ContentWriter) content).getReader();

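            // Fetch a reader onto the pre-update content, running as the System user, so the old and new binaries can be compared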
            RunAsWork<ContentReader> getReader = new RunAsWork<ContentReader>()
            {
                public ContentReader doWork() throws Exception
                {
                    return preUpdateContentURL == null ? null : contentService.getRawReader(preUpdateContentURL);
                }
            };
            ContentReader preUpdateContentReader = AuthenticationUtil.runAs(getReader, AuthenticationUtil.getSystemUserName());

            boolean contentChanged = preUpdateContentURL == null
                    || !AbstractContentReader.compareContentReaders(preUpdateContentReader,
                            postUpdateContentReader);

            if (contentChanged)
            {
                nodeService.removeAspect(contentNodeRef, ContentModel.ASPECT_NO_CONTENT);
                try
                {
                    nodeService.setProperty( contentNodeRef, ContentModel.PROP_CONTENT, contentData);
                }
                catch (ContentQuotaException qe)
                {
                    throw new DiskFullException(qe.getMessage());
                }
            }

            // Tidy up after ourselves after a successful commit. Otherwise leave things to allow a retry.
            AlfrescoTransactionSupport.bindListener(new TransactionListenerAdapter()
            {
                @Override
                public void afterCommit()
                {
                    content = null;
                    preUpdateContentURL = null;
                }
            });
        }
        else if (channel != null)
        {
            // Close it - it was not modified

            channel.close();
            channel = null;
        }
    }

    /**
     * Truncate or extend the file to the specified length
     *
     * @param size long
     * @exception IOException
     */
    public void truncateFile(long size)
        throws IOException
    {
        try {
            // If the content data channel has not been opened yet and the requested size is zero
            // then this is an open for overwrite so the existing content data is not copied

            if ( hasContent() == false && size == 0L)
            {
                // Open content for overwrite, no need to copy existing content data

                openContent(true, true);
            }
            else
            {
                // Normal open for write

                openContent(true, false);

                // Truncate or extend the channel

                channel.truncate(size);
            }
        }
        catch ( ContentIOException ex) {

            // DEBUG

            if ( logger.isDebugEnabled())
                logger.debug("Error opening file " + getFullName() + " for write", ex);

            // Convert to a file server I/O error

            throw new DiskFullException("Failed to open " + getFullName() + " for write");
        }

        // Set modification flag

        modified = true;

        // Set the new file size

        setFileSize( size);

        // Update the modification date/time

        if ( getFileState() != null)
            getFileState().updateModifyDateTime();

        // DEBUG

        if (logger.isDebugEnabled())
            logger.debug("Truncate file=" + this + ", size=" + size);
    }

    /**
     * Write a block of data to the file.
     *
     * @param buffer byte[]
     * @param length int
     * @param position int
     * @param fileOffset long
     * @exception IOException
     */
    public void writeFile(byte[] buffer, int length, int position, long fileOffset)
        throws IOException
    {
        try {
            // Open the channel for writing

            openContent(true, false);
        }
        catch ( ContentIOException ex) {

            // DEBUG

            if ( logger.isDebugEnabled())
                logger.debug("Error opening file " + getFullName() + " for write", ex);

            // Convert to a file server I/O error

            throw new DiskFullException("Failed to open " + getFullName() + " for write");
        }

        // Write to the channel

        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer, position, length);
        int count = channel.write(byteBuffer, fileOffset);

        // Set modification flag

        modified = true;
        incrementWriteCount();

        // Update the current file size

        setFileSize(channel.size());

        // Update the modification date/time

        if ( getFileState() != null)
            getFileState().updateModifyDateTime();

        // DEBUG

        if (logger.isDebugEnabled())
            logger.debug("Write file=" + this + ", size=" + count);
    }

    /**
     * Read from the file.
     *
     * @param buffer byte[]
     * @param length int
     * @param position int
     * @param fileOffset long
     * @return Length of data read.
     * @exception IOException
     */
    public int readFile(byte[] buffer, int length, int position, long fileOffset)
        throws IOException
    {
        // Open the channel for reading

        openContent(false, false);

        // Read from the channel

        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer, position, length);
        int count = channel.read(byteBuffer, fileOffset);
        if (count < 0)
        {
            count = 0; // doesn't obey the same rules, i.e. just returns the bytes read
        }

        // Update the access date/time

        if ( getFileState() != null)
            getFileState().updateAccessDateTime();

        // DEBUG

        if (logger.isDebugEnabled())
            logger.debug("Read file=" + this + " read=" + count);

        // Return the actual count of bytes read

        return count;
    }

    /**
     * Open the file
     *
     * @param createFlag boolean
     * @exception IOException
     */
    @Override
    public void openFile(boolean createFlag)
        throws IOException
    {
        // Wait for read/write before opening the content channel
    }

    /**
     * Seek to a new position in the file
     *
     * @param pos long
     * @param typ int
     * @return long
     */
    @Override
    public long seekFile(long pos, int typ)
        throws IOException
    {
        // Open the file, if not already open

        openContent( false, false);

        // Check if the current file position is the required file position

        long curPos = channel.position();

        switch (typ) {

        // From start of file

        case SeekType.StartOfFile :
            if (curPos != pos)
                channel.position( pos);
            break;

        // From current position

        case SeekType.CurrentPos :
            channel.position( curPos + pos);
            break;

        // From end of file

        case SeekType.EndOfFile :
        {
            long newPos = channel.size() + pos;
            channel.position(newPos);
        }
            break;
        }

        // Update the access date/time

        if ( getFileState() != null)
            getFileState().updateAccessDateTime();

        // DEBUG

        if (logger.isDebugEnabled())
            logger.debug("Seek file=" + this + ", pos=" + pos + ", type=" + typ);

        // Return the new file position

        return channel.position();
    }

    /**
     * Flush any buffered data for this file
     *
     * @exception IOException
     */
    @Override
    public void flushFile()
        throws IOException
    {
        // Open the channel for writing

        openContent(true, false);

        // Flush the channel - metadata flushing is not important

        channel.force(false);

        // Update the access date/time

        if ( getFileState() != null)
            getFileState().updateAccessDateTime();

        // DEBUG

        if (logger.isDebugEnabled())
            logger.debug("Flush file=" + this);
    }

    /**
     * Return the modified status
     *
     * @return boolean
     */
    public final boolean isModified() {
        return modified;
    }

    /**
     * Check if the file is an MS Office document type that needs special processing
     *
     * @param path String
     * @param sess SrvSession
     * @param nodeService NodeService
     * @param nodeRef NodeRef
     * @return boolean
     */
    private static final boolean isMSOfficeSpecialFile( String path, SrvSession sess, NodeService nodeService, NodeRef nodeRef) {

        // Check if the file extension indicates a problem MS Office format

        path = path.toLowerCase();

        if ( path.endsWith( ".xls") && sess instanceof SMBSrvSession) {

            // Check if the file is versionable

            if ( nodeService.hasAspect( nodeRef, ContentModel.ASPECT_VERSIONABLE))
                return true;
        }
        return false;
    }

    /**
     * Check if the file is an OpenOffice document type that needs special processing
     *
     * @param path String
     * @param sess SrvSession
     * @param nodeService NodeService
     * @param nodeRef NodeRef
     * @return boolean
     */
    private static final boolean isOpenOfficeSpecialFile( String path, SrvSession sess, NodeService nodeService, NodeRef nodeRef) {

        // Check if the file extension indicates a problem OpenOffice format

        path = path.toLowerCase();

        if ( path.endsWith( ".odt") && sess instanceof SMBSrvSession) {

            // Check if the file is versionable

            if ( nodeService.hasAspect( nodeRef, ContentModel.ASPECT_VERSIONABLE))
                return true;
        }
        return false;
    }
}