Merge 1.4 to HEAD

svn merge svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4340 svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4350 .
   svn resolved root\projects\3rd-party\.classpath
   svn resolved root\projects\repository\source\java\org\alfresco\repo\workflow\WorkflowInterpreter.java
   svn merge svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4379 svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4380 .
   svn merge svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4420 svn://svn.alfresco.com:3691/alfresco/BRANCHES/V1.4@4421 .
   svn resolved root\projects\3rd-party\.classpath


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@4655 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley
2006-12-19 14:24:45 +00:00
parent 473c9ff1ff
commit cfb373ae36
17 changed files with 473 additions and 70 deletions

View File

@@ -33,6 +33,7 @@ import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.dictionary.TypeDefinition;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
+ import org.alfresco.service.cmr.repository.InvalidStoreRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
@@ -213,7 +214,16 @@ public class ConfigurationChecker extends AbstractLifecycleBean
List<StoreRef> missingContentStoreRefs = new ArrayList<StoreRef>(0);
for (StoreRef storeRef : storeRefs)
{
- NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
+ NodeRef rootNodeRef = null;
+ try
+ {
+ rootNodeRef = nodeService.getRootNode(storeRef);
+ }
+ catch (InvalidStoreRefException e)
+ {
+ // the store is invalid and will therefore not have a root node entry
+ continue;
+ }
if (indexRecoveryMode != RecoveryMode.FULL)
{
if (logger.isDebugEnabled())

View File

@@ -0,0 +1,205 @@
/*
* Copyright (C) 2005 Alfresco, Inc.
*
* Licensed under the Mozilla Public License version 1.1
* with a permitted attribution clause. You may obtain a
* copy of the License at
*
* http://www.alfresco.org/legal/license.txt
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package org.alfresco.repo.admin.patch.impl;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Date;
import java.util.List;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.domain.hibernate.NodeImpl;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.service.cmr.admin.PatchException;
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
* Checks that all names do not end with ' ' or '.'
*
* @author David Caruana
*/
public class InvalidNameEndingPatch extends AbstractPatch
{
private static final String MSG_SUCCESS = "patch.invalidNameEnding.result";
private static final String MSG_REWRITTEN = "patch.invalidNameEnding.rewritten";
private static final String ERR_UNABLE_TO_FIX = "patch.invalidNameEnding.err.unable_to_fix";
private SessionFactory sessionFactory;
private NodeDaoService nodeDaoService;
public InvalidNameEndingPatch()
{
}
public void setSessionFactory(SessionFactory sessionFactory)
{
this.sessionFactory = sessionFactory;
}
/**
* @param nodeDaoService the node DAO service
*/
public void setNodeDaoService(NodeDaoService nodeDaoService)
{
this.nodeDaoService = nodeDaoService;
}
@Override
protected void checkProperties()
{
super.checkProperties();
checkPropertyNotNull(sessionFactory, "sessionFactory");
checkPropertyNotNull(nodeDaoService, "nodeDaoService");
}
@Override
protected String applyInternal() throws Exception
{
// initialise the helper
HibernateHelper helper = new HibernateHelper();
helper.setSessionFactory(sessionFactory);
try
{
String msg = helper.fixNames();
// done
return msg;
}
finally
{
helper.closeWriter();
}
}
private class HibernateHelper extends HibernateDaoSupport
{
private File logFile;
private FileChannel channel;
private HibernateHelper() throws IOException
{
logFile = new File("./InvalidNameEndingPatch.log");
// open the file for appending
RandomAccessFile outputFile = new RandomAccessFile(logFile, "rw");
channel = outputFile.getChannel();
// move to the end of the file
channel.position(channel.size());
// add a newline and it's ready
writeLine("").writeLine("");
writeLine("InvalidNameEndingPatch executing on " + new Date());
}
private HibernateHelper write(Object obj) throws IOException
{
channel.write(ByteBuffer.wrap(obj.toString().getBytes()));
return this;
}
private HibernateHelper writeLine(Object obj) throws IOException
{
write(obj);
write("\n");
return this;
}
private void closeWriter()
{
try { channel.close(); } catch (Throwable e) {}
}
public String fixNames() throws Exception
{
// get the association types to check
@SuppressWarnings("unused")
List<NodeImpl> nodes = getInvalidNames();
int updated = 0;
for (NodeImpl node : nodes)
{
NodeRef nodeRef = node.getNodeRef();
String name = (String)nodeService.getProperty(nodeRef, ContentModel.PROP_NAME);
if (name != null && (name.endsWith(".") || name.endsWith(" ")))
{
int i = (name.length() == 0) ? 0 : name.length() - 1;
while (i >= 0 && (name.charAt(i) == '.' || name.charAt(i) == ' '))
{
i--;
}
String updatedName = name.substring(0, i + 1);
int idx = 0;
boolean applied = false;
while (!applied)
{
try
{
nodeService.setProperty(nodeRef, ContentModel.PROP_NAME, updatedName);
applied = true;
}
catch(DuplicateChildNodeNameException e)
{
idx++;
if (idx > 10)
{
writeLine(I18NUtil.getMessage(ERR_UNABLE_TO_FIX, name ,updatedName));
throw new PatchException(ERR_UNABLE_TO_FIX, logFile);
}
updatedName += "_" + idx;
}
}
writeLine(I18NUtil.getMessage(MSG_REWRITTEN, name ,updatedName));
updated++;
getSession().flush();
getSession().clear();
}
}
String msg = I18NUtil.getMessage(MSG_SUCCESS, updated, logFile);
return msg;
}
@SuppressWarnings("unchecked")
private List<NodeImpl> getInvalidNames()
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session
.createQuery(
"select node from org.alfresco.repo.domain.hibernate.NodeImpl as node " +
"join node.properties prop where " +
" prop.stringValue like '%.' or " +
" prop.stringValue like '% ' ");
return query.list();
}
};
List<NodeImpl> results = (List<NodeImpl>) getHibernateTemplate().execute(callback);
return results;
}
}
}
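For reference, the renaming rules applied by InvalidNameEndingPatch can be exercised in isolation. The sketch below is illustrative only and is not part of the commit: it strips trailing '.' and ' ' characters and, on a name clash, retries with an appended numeric suffix the way the patch's DuplicateChildNodeNameException loop does, with a plain Set standing in for the repository's duplicate-name check.

import java.util.HashSet;
import java.util.Set;

// Illustrative sketch only; not part of the commit. Mirrors the trimming and
// suffix-retry behaviour of InvalidNameEndingPatch on plain strings.
public class InvalidNameEndingSketch
{
    // Strip trailing '.' and ' ' characters from a name.
    static String trimTrailing(String name)
    {
        int i = name.length() - 1;
        while (i >= 0 && (name.charAt(i) == '.' || name.charAt(i) == ' '))
        {
            i--;
        }
        return name.substring(0, i + 1);
    }

    // Retry with "_1", "_2", ... appended until the name no longer clashes; the Set
    // stands in for the DuplicateChildNodeNameException raised by the node service.
    // (The patch itself gives up after ten attempts and throws a PatchException.)
    static String makeUnique(String trimmedName, Set<String> existingSiblings)
    {
        String candidate = trimmedName;
        int idx = 0;
        while (existingSiblings.contains(candidate))
        {
            idx++;
            candidate = candidate + "_" + idx;
        }
        return candidate;
    }

    public static void main(String[] args)
    {
        Set<String> siblings = new HashSet<String>();
        siblings.add("report");
        String trimmed = trimTrailing("report.. ");          // -> "report"
        System.out.println(trimmed);
        System.out.println(makeUnique(trimmed, siblings));   // -> "report_1"
    }
}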

View File

@@ -104,10 +104,16 @@ public class UniqueChildNamePatch extends AbstractPatch
HibernateHelper helper = new HibernateHelper();
helper.setSessionFactory(sessionFactory);
- String msg = helper.assignCrc();
- // done
- return msg;
+ try
+ {
+ String msg = helper.assignCrc();
+ // done
+ return msg;
+ }
+ finally
+ {
+ helper.closeWriter();
+ }
}
private class HibernateHelper extends HibernateDaoSupport
@@ -139,6 +145,10 @@ public class UniqueChildNamePatch extends AbstractPatch
write("\n");
return this;
}
+ private void closeWriter()
+ {
+ try { channel.close(); } catch (Throwable e) {}
+ }
public String assignCrc() throws Exception
{

View File

@@ -57,7 +57,6 @@ public class OpenOfficeMetadataExtracter extends AbstractMetadataExtracter
};
private OpenOfficeConnection connection;
- private boolean isConnected;
public OpenOfficeMetadataExtracter()
{
@@ -69,6 +68,25 @@ public class OpenOfficeMetadataExtracter extends AbstractMetadataExtracter
this.connection = connection;
}
+ private synchronized void connect()
+ {
+ if (isConnected())
+ {
+ // just leave it
+ }
+ else
+ {
+ try
+ {
+ connection.connect();
+ }
+ catch (ConnectException e)
+ {
+ logger.warn(e.getMessage());
+ }
+ }
+ }
/**
* Initialises the bean by establishing an UNO connection
*/
@@ -76,18 +94,14 @@ public class OpenOfficeMetadataExtracter extends AbstractMetadataExtracter
{
PropertyCheck.mandatory("OpenOfficeMetadataExtracter", "connection", connection);
- // attempt to make an connection
- try
+ // attempt a connection
+ connect();
+ if (isConnected())
{
- connection.connect();
- isConnected = true;
- // register
+ // Only register if the connection is available initially. Reconnections are only supported
+ // if the server is able to connect initially.
super.register();
}
- catch (ConnectException e)
- {
- isConnected = false;
- }
}
/**
@@ -96,7 +110,7 @@ public class OpenOfficeMetadataExtracter extends AbstractMetadataExtracter
*/
public boolean isConnected()
{
- return isConnected;
+ return connection.isConnected();
}
public void extractInternal(ContentReader reader, final Map<QName, Serializable> destination) throws Throwable

View File

@@ -53,14 +53,12 @@ public class OpenOfficeContentTransformer extends AbstractContentTransformer
private static Log logger = LogFactory.getLog(OpenOfficeContentTransformer.class);
private OpenOfficeConnection connection;
- private boolean connected;
private OpenOfficeDocumentConverter converter;
private String documentFormatsConfiguration;
private DocumentFormatRegistry formatRegistry;
public OpenOfficeContentTransformer()
{
- this.connected = false;
}
public void setConnection(OpenOfficeConnection connection)
@@ -80,20 +78,25 @@ public class OpenOfficeContentTransformer extends AbstractContentTransformer
public boolean isConnected()
{
- return connected;
+ return connection.isConnected();
}
private synchronized void connect()
{
- try
+ if (isConnected())
{
- connection.connect();
- connected = true;
+ // just leave it
}
- catch (ConnectException e)
+ else
{
- logger.warn(e.getMessage());
- connected = false;
+ try
+ {
+ connection.connect();
+ }
+ catch (ConnectException e)
+ {
+ logger.warn(e.getMessage());
+ }
}
}
@@ -128,9 +131,10 @@ public class OpenOfficeContentTransformer extends AbstractContentTransformer
formatRegistry = new XmlDocumentFormatRegistry();
}
- if (connected)
+ if (isConnected())
{
- // register
+ // If the server starts with OO running, then it will attempt reconnections. Otherwise it will
+ // just be wasting time trying to see if a connection is available all the time.
super.register();
}
}
@@ -140,7 +144,7 @@ public class OpenOfficeContentTransformer extends AbstractContentTransformer
*/
public double getReliability(String sourceMimetype, String targetMimetype)
{
- if (!connected)
+ if (!isConnected())
{
return 0.0;
}
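Both OpenOffice beans (the metadata extracter and the content transformer) move to the same connection handling: the cached boolean flag is dropped, isConnected() delegates to the OpenOfficeConnection itself, and connect() quietly logs a failed attempt instead of failing registration. A standalone sketch of that pattern follows; it is illustrative only, and RemoteConnection is a stand-in type, not the real OpenOfficeConnection API.

import java.net.ConnectException;

// Illustrative sketch only; not part of the commit. RemoteConnection stands in
// for the OpenOfficeConnection dependency shared by both beans.
interface RemoteConnection
{
    boolean isConnected();
    void connect() throws ConnectException;
}

public class LazyConnectSketch
{
    private final RemoteConnection connection;

    public LazyConnectSketch(RemoteConnection connection)
    {
        this.connection = connection;
    }

    // No cached flag: the connection object is the single source of truth.
    public boolean isConnected()
    {
        return connection.isConnected();
    }

    // Attempt a connection if one is not already open; failure is logged, not fatal.
    private synchronized void connect()
    {
        if (isConnected())
        {
            return;
        }
        try
        {
            connection.connect();
        }
        catch (ConnectException e)
        {
            System.err.println("OpenOffice connection unavailable: " + e.getMessage());
        }
    }

    // Register the component only if the connection is available at startup.
    public void init()
    {
        connect();
        if (isConnected())
        {
            register();
        }
    }

    private void register()
    {
        System.out.println("registered");
    }

    public static void main(String[] args)
    {
        // A connection stub that never becomes available, showing the non-fatal path.
        LazyConnectSketch bean = new LazyConnectSketch(new RemoteConnection()
        {
            public boolean isConnected() { return false; }
            public void connect() throws ConnectException
            {
                throw new ConnectException("no OpenOffice process listening");
            }
        });
        bean.init(); // logs the failure and skips registration
        System.out.println("connected: " + bean.isConnected());
    }
}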

View File

@@ -70,7 +70,7 @@ public class HibernateNodeTest extends BaseSpringTest
{
store = new StoreImpl();
StoreKey storeKey = new StoreKey(StoreRef.PROTOCOL_WORKSPACE,
"TestWorkspace@" + System.currentTimeMillis() + " - " + System.nanoTime());
"TestWorkspace@" + getName() + " - " + System.currentTimeMillis());
store.setKey(storeKey);
// persist so that it is present in the hibernate cache
getSession().save(store);

View File

@@ -220,7 +220,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
Node node = store.getRootNode();
if (node == null)
{
- throw new InvalidStoreRefException("Store does not have a root node", storeRef);
+ throw new InvalidStoreRefException("Store does not have a root node: " + storeRef, storeRef);
}
NodeRef nodeRef = node.getNodeRef();
// done

View File

@@ -37,6 +37,7 @@ import org.alfresco.service.cmr.avm.AVMNodeDescriptor;
import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.avmsync.AVMDifference;
import org.alfresco.service.cmr.avmsync.AVMSyncService;
+ import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileInfo;
import org.alfresco.service.cmr.repository.ContentWriter;
@@ -86,6 +87,7 @@ public class WorkflowInterpreter
private WorkflowDefinition currentWorkflowDef = null;
private WorkflowPath currentPath = null;
private String currentDeploy = null;
+ private String username = "admin";
/**
* Last command issued
@@ -105,7 +107,6 @@ public class WorkflowInterpreter
{
ApplicationContext context = ApplicationContextHelper.getApplicationContext();
WorkflowInterpreter console = (WorkflowInterpreter)context.getBean("workflowInterpreter");
- AuthenticationUtil.setSystemUserAsCurrentUser();
console.rep();
System.exit(0);
}
@@ -179,16 +180,20 @@ public class WorkflowInterpreter
*/
public void rep()
{
+ // accept commands
while (true)
{
System.out.print("ok> ");
try
{
- String line = fIn.readLine();
+ // get command
+ final String line = fIn.readLine();
if (line.equals("exit") || line.equals("quit"))
{
return;
}
+ // execute command in context of currently selected user
long startms = System.currentTimeMillis();
System.out.print(interpretCommand(line));
System.out.println("" + (System.currentTimeMillis() - startms) + "ms");
@@ -204,12 +209,31 @@ public class WorkflowInterpreter
/**
* Interpret a single command using the BufferedReader passed in for any data needed.
*
+ * @param line The unparsed command
+ * @return The textual output of the command.
+ */
+ public String interpretCommand(final String line)
+ throws IOException
+ {
+ // execute command in context of currently selected user
+ return AuthenticationUtil.runAs(new RunAsWork<String>()
+ {
+ public String doWork() throws Exception
+ {
+ return executeCommand(line);
+ }
+ }, username);
+ }
+ /**
+ * Execute a single command using the BufferedReader passed in for any data needed.
+ *
* TODO: Use decent parser!
*
* @param line The unparsed command
* @return The textual output of the command.
*/
- public String interpretCommand(String line)
+ private String executeCommand(String line)
throws IOException
{
String[] command = line.split(" ");
@@ -229,7 +253,7 @@ public class WorkflowInterpreter
{
return "No command entered yet.";
}
return "repeating command " + lastCommand + "\n\n" + interpretCommand(lastCommand);
return "repeating command " + lastCommand + "\n\n" + executeCommand(lastCommand);
}
// remember last command
@@ -500,7 +524,7 @@ public class WorkflowInterpreter
}
out.println("deployed definition id: " + def.id + " , name: " + def.name + " , title: " + def.title + " , version: " + def.version);
currentDeploy = command[1];
out.print(interpretCommand("use definition " + def.id));
out.print(executeCommand("use definition " + def.id));
}
else if (command[0].equals("redeploy"))
@@ -509,7 +533,7 @@ public class WorkflowInterpreter
{
return "nothing to redeploy\n";
}
out.print(interpretCommand("deploy " + currentDeploy));
out.print(executeCommand("deploy " + currentDeploy));
}
else if (command[0].equals("undeploy"))
@@ -527,7 +551,7 @@ public class WorkflowInterpreter
workflowService.undeployDefinition(command[2]);
currentWorkflowDef = null;
currentPath = null;
out.print(interpretCommand("show definitions"));
out.print(executeCommand("show definitions"));
}
}
@@ -554,7 +578,7 @@ public class WorkflowInterpreter
}
currentWorkflowDef = def;
currentPath = null;
out.print(interpretCommand("use"));
out.print(executeCommand("use"));
}
else if (command[1].equals("workflow"))
@@ -566,7 +590,7 @@ public class WorkflowInterpreter
WorkflowInstance instance = workflowService.getWorkflowById(command[2]);
currentWorkflowDef = instance.definition;
currentPath = workflowService.getWorkflowPaths(instance.id).get(0);
out.print(interpretCommand("use"));
out.print(executeCommand("use"));
}
else
{
@@ -579,9 +603,9 @@ public class WorkflowInterpreter
{
if (command.length == 2)
{
- AuthenticationUtil.setCurrentUser(command[1]);
+ username = command[1];
}
- out.println("using user " + AuthenticationUtil.getCurrentUserName());
+ out.println("using user " + username);
}
else if (command[0].equals("start"))
@@ -942,7 +966,7 @@ public class WorkflowInterpreter
*/
public String getCurrentUserName()
{
- return AuthenticationUtil.getCurrentUserName();
+ return username;
}
}
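The net effect of the WorkflowInterpreter changes is that commands no longer mutate the thread's authentication state; each command is executed through AuthenticationUtil.runAs(...) as the user selected with the 'user' command, so the choice of user is scoped to the command rather than leaking into the calling thread. The sketch below is illustrative only and merely mimics the control flow of that run-as pattern; it is not the Alfresco AuthenticationUtil implementation.

import java.util.concurrent.Callable;

// Simplified stand-in for the run-as pattern used above: execute a unit of work
// with a given user set for the duration of the call, then restore the previous
// user. AuthenticationUtil.runAs plays this role in Alfresco.
public class RunAsSketch
{
    private static final ThreadLocal<String> CURRENT_USER = new ThreadLocal<String>();

    public static <R> R runAs(Callable<R> work, String username) throws Exception
    {
        String previousUser = CURRENT_USER.get();
        CURRENT_USER.set(username);
        try
        {
            return work.call();
        }
        finally
        {
            // always restore the caller's user, even if the work throws
            if (previousUser == null)
            {
                CURRENT_USER.remove();
            }
            else
            {
                CURRENT_USER.set(previousUser);
            }
        }
    }

    public static void main(String[] args) throws Exception
    {
        String output = runAs(new Callable<String>()
        {
            public String call()
            {
                return "executed as " + CURRENT_USER.get();
            }
        }, "admin");
        System.out.println(output); // executed as admin
    }
}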