From e738ddfdf1551f182c0c6517981c09eed3b1c5f1 Mon Sep 17 00:00:00 2001 From: Derek Hulley Date: Fri, 1 Sep 2006 17:06:07 +0000 Subject: [PATCH] Schema script auto-application Split NodeStatus transaction out into a shared entity and also record server info Beginnings of changes required for index rebuilding, both full and incremental git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@3654 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261 --- config/alfresco/application-context.xml | 2 +- config/alfresco/bootstrap-context.xml | 25 + config/alfresco/core-services-context.xml | 4 +- .../alfresco/domain/hibernate-cfg.properties | 4 +- .../custom-db-connection.properties.sample | 1 + config/alfresco/hibernate-context.xml | 8 +- config/alfresco/index-recovery-context.xml | 47 +- .../messages/patch-service.properties | 3 + .../messages/schema-update.properties | 8 + .../alfresco/patch/patch-services-context.xml | 22 +- config/alfresco/repository.properties | 1 + config/alfresco/version.properties | 2 +- .../repo/admin/patch/PatchExecuter.java | 2 +- .../patch/impl/SchemaUpgradeScriptPatch.java | 27 +- .../org/alfresco/repo/domain/NodeStatus.java | 4 +- .../java/org/alfresco/repo/domain/Server.java | 33 + .../org/alfresco/repo/domain/Transaction.java | 35 + .../domain/hibernate/HibernateNodeTest.java | 27 +- .../repo/domain/hibernate/Node.hbm.xml | 78 +- .../repo/domain/hibernate/NodeStatusImpl.java | 14 +- .../repo/domain/hibernate/ServerImpl.java | 76 ++ .../repo/domain/hibernate/Transaction.hbm.xml | 62 ++ .../domain/hibernate/TransactionImpl.java | 88 +++ .../repo/domain/schema/SchemaBootstrap.java | 529 +++++++++++++ .../repo/node/BaseNodeServiceTest.java | 5 +- .../repo/node/db/DbNodeServiceImpl.java | 6 +- .../HibernateNodeDaoServiceImpl.java | 154 +++- .../index/FullIndexRecoveryComponent.java | 131 +--- .../index/FullIndexRecoveryComponentTest.java | 2 +- .../index/MissingContentReindexComponent.java | 741 ++++++++++++++++++ .../util/debug/MethodCallLogAdvice.java | 136 ---- 31 files changed, 1926 insertions(+), 351 deletions(-) create mode 100644 config/alfresco/messages/schema-update.properties create mode 100644 source/java/org/alfresco/repo/domain/Server.java create mode 100644 source/java/org/alfresco/repo/domain/Transaction.java create mode 100644 source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java create mode 100644 source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml create mode 100644 source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java create mode 100644 source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java create mode 100644 source/java/org/alfresco/repo/node/index/MissingContentReindexComponent.java delete mode 100644 source/java/org/alfresco/util/debug/MethodCallLogAdvice.java diff --git a/config/alfresco/application-context.xml b/config/alfresco/application-context.xml index c9e73e01af..2e7a4b3aaf 100644 --- a/config/alfresco/application-context.xml +++ b/config/alfresco/application-context.xml @@ -22,8 +22,8 @@ - + diff --git a/config/alfresco/bootstrap-context.xml b/config/alfresco/bootstrap-context.xml index c12b56d2f7..aa61568bb4 100644 --- a/config/alfresco/bootstrap-context.xml +++ b/config/alfresco/bootstrap-context.xml @@ -25,6 +25,31 @@ + + + + + + + ${db.schema.update} + + + + classpath:alfresco/dbscripts/create/1.4/${db.script.dialect}/sample.sql + + + + + + + + + + + + + + diff --git a/config/alfresco/core-services-context.xml b/config/alfresco/core-services-context.xml index 
90d0af668f..4e38f52100 100644 --- a/config/alfresco/core-services-context.xml +++ b/config/alfresco/core-services-context.xml @@ -60,9 +60,6 @@ - - - @@ -79,6 +76,7 @@ alfresco.messages.template-service alfresco.messages.lock-service alfresco.messages.patch-service + alfresco.messages.schema-update alfresco.messages.webdav-messages diff --git a/config/alfresco/domain/hibernate-cfg.properties b/config/alfresco/domain/hibernate-cfg.properties index 8c37e5334d..5da910a9cd 100644 --- a/config/alfresco/domain/hibernate-cfg.properties +++ b/config/alfresco/domain/hibernate-cfg.properties @@ -1,10 +1,10 @@ # # Hibernate configuration # -hibernate.jdbc.use_streams_for_binary=true hibernate.dialect=org.hibernate.dialect.MySQLInnoDBDialect + +hibernate.jdbc.use_streams_for_binary=true hibernate.show_sql=false -hibernate.hbm2ddl.auto=update hibernate.cache.use_query_cache=true hibernate.max_fetch_depth=10 hibernate.cache.provider_class=org.alfresco.repo.cache.InternalEhCacheManagerFactoryBean diff --git a/config/alfresco/extension/custom-db-connection.properties.sample b/config/alfresco/extension/custom-db-connection.properties.sample index d09219b796..175328c81f 100644 --- a/config/alfresco/extension/custom-db-connection.properties.sample +++ b/config/alfresco/extension/custom-db-connection.properties.sample @@ -2,6 +2,7 @@ # Sample database connection properties # +#db.schema.update=true #db.username=alfresco #db.password=alfresco #db.pool.initial=10 diff --git a/config/alfresco/hibernate-context.xml b/config/alfresco/hibernate-context.xml index e2b0deff99..db073779f6 100644 --- a/config/alfresco/hibernate-context.xml +++ b/config/alfresco/hibernate-context.xml @@ -30,6 +30,9 @@ + + false + @@ -38,6 +41,7 @@ org/alfresco/repo/domain/hibernate/Node.hbm.xml org/alfresco/repo/domain/hibernate/Store.hbm.xml + org/alfresco/repo/domain/hibernate/Transaction.hbm.xml org/alfresco/repo/domain/hibernate/VersionCount.hbm.xml org/alfresco/repo/domain/hibernate/AppliedPatch.hbm.xml org/alfresco/repo/domain/hibernate/Permission.hbm.xml @@ -150,6 +154,8 @@ ${cache.strategy} ${cache.strategy} ${cache.strategy} + ${cache.strategy} + ${cache.strategy} ${cache.strategy} ${cache.strategy} @@ -204,7 +210,7 @@ - + diff --git a/config/alfresco/index-recovery-context.xml b/config/alfresco/index-recovery-context.xml index bad12938ae..31d9566062 100644 --- a/config/alfresco/index-recovery-context.xml +++ b/config/alfresco/index-recovery-context.xml @@ -22,17 +22,11 @@ - - - workspace://SpacesStore - workspace://lightWeightVersionStore - user://alfrescoUserStore - - + false @@ -47,4 +41,43 @@ + + + + \ No newline at end of file diff --git a/config/alfresco/messages/patch-service.properties b/config/alfresco/messages/patch-service.properties index d98071a699..c2a5bf70ef 100644 --- a/config/alfresco/messages/patch-service.properties +++ b/config/alfresco/messages/patch-service.properties @@ -16,6 +16,9 @@ patch.general.property_not_set=Patch property ''{0}'' has not been set on this p # Individual patch messages +patch.marker.description=Marker patch to record installations and upgrades +patch.marker.result=Marker patch applied + patch.savedSearchesFolder.description=Ensures the existence of the 'Saved Searches' folder. 
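Note on the new db.schema.update flag shown above: the bootstrap-context.xml hunk near the top of this patch wires up the new SchemaBootstrap component, and only its literal values (${db.schema.update} and the post-create sample.sql URL) are visible in the hunk. The following is a hedged sketch of how such a bean definition could look; the bean id, the property names (inferred from the SchemaBootstrap setters later in this patch) and the bean references are assumptions, not the exact original:

    <!-- Sketch only: bean id, property names and bean references are assumed -->
    <bean id="schemaBootstrap" class="org.alfresco.repo.domain.schema.SchemaBootstrap">
       <!-- the setter takes the LocalSessionFactoryBean itself, hence the factory-bean reference -->
       <property name="localSessionFactory">
          <ref bean="&amp;sessionFactory"/>
       </property>
       <property name="updateSchema">
          <value>${db.schema.update}</value>
       </property>
       <property name="postCreateScriptUrls">
          <list>
             <value>classpath:alfresco/dbscripts/create/1.4/${db.script.dialect}/sample.sql</value>
          </list>
       </property>
       <!-- which schemaUpdateScript patches belong to the validate list and which to the apply list is illustrative -->
       <property name="validateUpdateScriptPatches">
          <list/>
       </property>
       <property name="applyUpdateScriptPatches">
          <list>
             <ref bean="patch.schemaUpdateScript-V1.4-1"/>
             <ref bean="patch.schemaUpdateScript-V1.4-2"/>
          </list>
       </property>
    </bean>

With a component like this in place, hibernate.hbm2ddl.auto is dropped from hibernate-cfg.properties and what appears to be the session factory's schemaUpdate flag is pinned to false, so Hibernate no longer alters the schema on its own; SchemaBootstrap decides whether to create the schema, apply upgrade scripts, or merely validate it.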
patch.savedSearchesFolder.result.exists=The saved searches folder already exists: {0} patch.savedSearchesFolder.result.created=The saved searches folder was successfully created: {0} diff --git a/config/alfresco/messages/schema-update.properties b/config/alfresco/messages/schema-update.properties new file mode 100644 index 0000000000..cbf989adc2 --- /dev/null +++ b/config/alfresco/messages/schema-update.properties @@ -0,0 +1,8 @@ +# Schema update messages + +schema.update.msg.executing_script=Executing schema upgrade script: {0} +schema.update.err.update_failed=Schema auto-update failed +schema.update.err.validation_failed=Schema validation failed +schema.update.err.update_script_not_run=The following schema upgrade script needs to be executed manually: {0} +schema.update.err.script_not_found=The schema script could not be found at location {0} +schema.update.err.statement_terminator=Scripts must terminate all statements with '';'' (line {0} of {1}). \ No newline at end of file diff --git a/config/alfresco/patch/patch-services-context.xml b/config/alfresco/patch/patch-services-context.xml index 0241ae4919..dda2c969e9 100644 --- a/config/alfresco/patch/patch-services-context.xml +++ b/config/alfresco/patch/patch-services-context.xml @@ -441,12 +441,12 @@ patch.schemaUpdateScript-V1.4-1 - patch.patch.schemaUpgradeScriptPatch.description + patch.schemaUpgradeScript.description 0 19 20 - - AlfrescoSchemaUpdate-1.4-1-xxx.sql + + classpath:alfresco/dbscripts/upgrade/1.4/${db.script.dialect}/AlfrescoSchemaUpdate-1.4-1.sql @@ -472,5 +472,21 @@ + + patch.schemaUpdateScript-V1.4-2 + patch.schemaUpgradeScript.description + 0 + 20 + 21 + + classpath:alfresco/dbscripts/upgrade/1.4/${db.script.dialect}/AlfrescoSchemaUpdate-1.4-2.sql + + + + + + + + diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties index f4d4c7020f..db2992c9f8 100644 --- a/config/alfresco/repository.properties +++ b/config/alfresco/repository.properties @@ -56,6 +56,7 @@ lucene.lock.poll.interval=100 # Database configuration +db.schema.update=true db.driver=org.gjt.mm.mysql.Driver db.name=alfresco db.url=jdbc:mysql:///${db.name} diff --git a/config/alfresco/version.properties b/config/alfresco/version.properties index 3936840fea..87530803c5 100644 --- a/config/alfresco/version.properties +++ b/config/alfresco/version.properties @@ -19,4 +19,4 @@ version.build=@build-number@ # Schema number -version.schema=20 +version.schema=21 diff --git a/source/java/org/alfresco/repo/admin/patch/PatchExecuter.java b/source/java/org/alfresco/repo/admin/patch/PatchExecuter.java index 12247e4b77..d0f2655d69 100644 --- a/source/java/org/alfresco/repo/admin/patch/PatchExecuter.java +++ b/source/java/org/alfresco/repo/admin/patch/PatchExecuter.java @@ -60,7 +60,7 @@ public class PatchExecuter implements ApplicationListener { logger.info(I18NUtil.getMessage(MSG_CHECKING)); - Date before = new Date(System.currentTimeMillis() - 20000L); // 20 seconds ago + Date before = new Date(System.currentTimeMillis() - 60000L); // 60 seconds ago patchService.applyOutstandingPatches(); Date after = new Date(System .currentTimeMillis() + 20000L); // 20 seconds ahead diff --git a/source/java/org/alfresco/repo/admin/patch/impl/SchemaUpgradeScriptPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/SchemaUpgradeScriptPatch.java index 69dfd1e092..a68dc08fec 100644 --- a/source/java/org/alfresco/repo/admin/patch/impl/SchemaUpgradeScriptPatch.java +++ b/source/java/org/alfresco/repo/admin/patch/impl/SchemaUpgradeScriptPatch.java @@ 
-20,7 +20,7 @@ import org.alfresco.repo.admin.patch.AbstractPatch; import org.alfresco.service.cmr.admin.PatchException; /** - * This patch ensures that an upgrade script has been executed. Upgrade scripts + * This patch ensures that an upgrade scriptUrl has been executed. Upgrade scripts * should create an entry for the patch with the required ID and execution status * so that the code in this class is never called. If called, an exception message * is always generated. @@ -31,26 +31,37 @@ public class SchemaUpgradeScriptPatch extends AbstractPatch { private static final String MSG_NOT_EXECUTED = "patch.schemaUpgradeScript.err.not_executed"; - private String scriptName; + private String scriptUrl; public SchemaUpgradeScriptPatch() { } + + /** + * @return Returns the URL of the script that has to have been run + */ + public String getScriptUrl() + { + return scriptUrl; + } /** - * Set the name of the upgrade script to execute. + * Set the URL of the upgrade script to execute. This is the full URL of the + * file, e.g. classpath:alfresco/dbscripts/upgrade/1.4/${db.script.dialect}/AlfrescoSchemaUpdate-1.4-2.sql + * where the ${db.script.dialect} placeholder will be substituted with the Hibernate + * Dialect as configured for the system. * - * @param scriptName the script filename + * @param scriptUrl the script URL */ - public void setScriptName(String scriptName) + public void setScriptUrl(String script) { - this.scriptName = scriptName; + this.scriptUrl = script; } protected void checkProperties() { super.checkProperties(); - checkPropertyNotNull(scriptName, "scriptName"); + checkPropertyNotNull(scriptUrl, "scriptUrl"); } /** @@ -59,6 +70,6 @@ public class SchemaUpgradeScriptPatch extends AbstractPatch @Override protected String applyInternal() throws Exception { - throw new PatchException(MSG_NOT_EXECUTED, scriptName); + throw new PatchException(MSG_NOT_EXECUTED, scriptUrl); } } diff --git a/source/java/org/alfresco/repo/domain/NodeStatus.java b/source/java/org/alfresco/repo/domain/NodeStatus.java index 4df06d8cf1..53969c0a69 100644 --- a/source/java/org/alfresco/repo/domain/NodeStatus.java +++ b/source/java/org/alfresco/repo/domain/NodeStatus.java @@ -41,9 +41,9 @@ public interface NodeStatus public void setNode(Node node); - public String getChangeTxnId(); + public Transaction getTransaction(); - public void setChangeTxnId(String txnId); + public void setTransaction(Transaction transaction); public boolean isDeleted(); } diff --git a/source/java/org/alfresco/repo/domain/Server.java b/source/java/org/alfresco/repo/domain/Server.java new file mode 100644 index 0000000000..f07c70b423 --- /dev/null +++ b/source/java/org/alfresco/repo/domain/Server.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2006 Alfresco, Inc. + * + * Licensed under the Mozilla Public License version 1.1 + * with a permitted attribution clause. You may obtain a + * copy of the License at + * + * http://www.alfresco.org/legal/license.txt + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific + * language governing permissions and limitations under the + * License. + */ +package org.alfresco.repo.domain; + +/** + * Interface for persistent server objects. These persist + * details of the servers that have committed transactions to the + * database, for instance.
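For comparison with the setScriptUrl property introduced above, here is a hedged sketch of how one of the schemaUpdateScript patch beans from patch-services-context.xml might be declared. The id, schema numbers and scriptUrl value appear earlier in this patch; the parent bean name and the remaining AbstractPatch property names are assumptions:

    <!-- Sketch only: parent bean and property names are assumed -->
    <bean id="patch.schemaUpdateScript-V1.4-2"
          class="org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch"
          parent="basePatch">
       <property name="id"><value>patch.schemaUpdateScript-V1.4-2</value></property>
       <property name="description"><value>patch.schemaUpgradeScript.description</value></property>
       <property name="fixesFromSchema"><value>0</value></property>
       <property name="fixesToSchema"><value>20</value></property>
       <property name="targetSchema"><value>21</value></property>
       <property name="scriptUrl">
          <value>classpath:alfresco/dbscripts/upgrade/1.4/${db.script.dialect}/AlfrescoSchemaUpdate-1.4-2.sql</value>
       </property>
    </bean>

The upgrade script itself records an applied-patch entry for this id, so applyInternal() is only reached when the script has not been executed, which is why it does nothing but raise the not-executed PatchException.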
+ * + * @author Derek Hulley + */ +public interface Server +{ + public Long getId(); + + public String getIpAddress(); + + public void setIpAddress(String ipAddress); +} diff --git a/source/java/org/alfresco/repo/domain/Transaction.java b/source/java/org/alfresco/repo/domain/Transaction.java new file mode 100644 index 0000000000..42633a70e7 --- /dev/null +++ b/source/java/org/alfresco/repo/domain/Transaction.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2006 Alfresco, Inc. + * + * Licensed under the Mozilla Public License version 1.1 + * with a permitted attribution clause. You may obtain a + * copy of the License at + * + * http://www.alfresco.org/legal/license.txt + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific + * language governing permissions and limitations under the + * License. + */ +package org.alfresco.repo.domain; + +/** + * Interface for persistent transaction objects. + * + * @author Derek Hulley + */ +public interface Transaction +{ + public Long getId(); + + public String getChangeTxnId(); + + public void setChangeTxnId(String changeTxnId); + + public Server getServer(); + + public void setServer(Server server); +} diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java index 47014f7587..fc9f9535ba 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java +++ b/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java @@ -31,8 +31,10 @@ import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.NodeKey; import org.alfresco.repo.domain.NodeStatus; import org.alfresco.repo.domain.PropertyValue; +import org.alfresco.repo.domain.Server; import org.alfresco.repo.domain.Store; import org.alfresco.repo.domain.StoreKey; +import org.alfresco.repo.domain.Transaction; import org.alfresco.repo.transaction.AlfrescoTransactionSupport; import org.alfresco.service.cmr.dictionary.DataTypeDefinition; import org.alfresco.service.cmr.repository.StoreRef; @@ -53,8 +55,11 @@ import org.hibernate.exception.ConstraintViolationException; public class HibernateNodeTest extends BaseSpringTest { private static final String TEST_NAMESPACE = "http://www.alfresco.org/test/HibernateNodeTest"; + private static int i = 0; private Store store; + private Server server; + private Transaction transaction; public HibernateNodeTest() { @@ -68,6 +73,18 @@ public class HibernateNodeTest extends BaseSpringTest store.setKey(storeKey); // persist so that it is present in the hibernate cache getSession().save(store); + + server = (Server) getSession().get(ServerImpl.class, new Long(1)); + if (server == null) + { + server = new ServerImpl(); + server.setIpAddress("" + "i_" + System.currentTimeMillis()); + getSession().save(server); + } + transaction = new TransactionImpl(); + transaction.setServer(server); + transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + getSession().save(transaction); } protected void onTearDownInTransaction() @@ -108,7 +125,7 @@ public class HibernateNodeTest extends BaseSpringTest // create the node status NodeStatus nodeStatus = new NodeStatusImpl(); nodeStatus.setKey(key); - nodeStatus.setChangeTxnId("txn:123"); + nodeStatus.setTransaction(transaction); getSession().save(nodeStatus); // create a new Node @@ -131,7 
+148,7 @@ public class HibernateNodeTest extends BaseSpringTest node = nodeStatus.getNode(); assertNotNull("Node was not attached to status", node); // change the values - nodeStatus.setChangeTxnId("txn:456"); + transaction.setChangeTxnId("txn:456"); // delete the node getSession().delete(node); @@ -351,7 +368,7 @@ public class HibernateNodeTest extends BaseSpringTest NodeStatus containerNodeStatus = new NodeStatusImpl(); containerNodeStatus.setKey(containerNodeKey); containerNodeStatus.setNode(containerNode); - containerNodeStatus.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + containerNodeStatus.setTransaction(transaction); getSession().save(containerNodeStatus); // make content node 1 Node contentNode1 = new NodeImpl(); @@ -366,7 +383,7 @@ public class HibernateNodeTest extends BaseSpringTest NodeStatus contentNodeStatus1 = new NodeStatusImpl(); contentNodeStatus1.setKey(contentNodeKey1); contentNodeStatus1.setNode(contentNode1); - contentNodeStatus1.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + contentNodeStatus1.setTransaction(transaction); getSession().save(contentNodeStatus1); // make content node 2 Node contentNode2 = new NodeImpl(); @@ -381,7 +398,7 @@ public class HibernateNodeTest extends BaseSpringTest NodeStatus contentNodeStatus2 = new NodeStatusImpl(); contentNodeStatus2.setKey(contentNodeKey2); contentNodeStatus2.setNode(contentNode2); - contentNodeStatus2.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + contentNodeStatus2.setTransaction(transaction); getSession().save(contentNodeStatus2); // create an association to content 1 ChildAssoc assoc1 = new ChildAssocImpl(); diff --git a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml index 79485306d3..15fe0ed614 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml +++ b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml @@ -113,6 +113,16 @@ + + - - - - - - - - - - - - + + + + + + + + + + - @@ -309,25 +316,26 @@ select distinct - status.changeTxnId + transaction.changeTxnId from - org.alfresco.repo.domain.hibernate.NodeStatusImpl as status + org.alfresco.repo.domain.hibernate.TransactionImpl as transaction where - status.changeTxnId > :currentTxnId + transaction.changeTxnId > :currentTxnId order by - status.changeTxnId + transaction.changeTxnId select - count(status.changeTxnId) + count(transaction.changeTxnId) from org.alfresco.repo.domain.hibernate.NodeStatusImpl as status + join status.transaction as transaction where status.key.protocol = :storeProtocol and status.key.identifier = :storeIdentifier and status.node.id is not null and - status.changeTxnId = :changeTxnId + transaction.changeTxnId = :changeTxnId @@ -335,11 +343,12 @@ status from org.alfresco.repo.domain.hibernate.NodeStatusImpl as status + join status.transaction as transaction where status.key.protocol = :storeProtocol and status.key.identifier = :storeIdentifier and status.node.id is not null and - status.changeTxnId = :changeTxnId + transaction.changeTxnId = :changeTxnId @@ -347,11 +356,12 @@ status from org.alfresco.repo.domain.hibernate.NodeStatusImpl as status + join status.transaction as transaction where status.key.protocol = :storeProtocol and status.key.identifier = :storeIdentifier and status.node.id is null and - status.changeTxnId = :changeTxnId + transaction.changeTxnId = :changeTxnId diff --git a/source/java/org/alfresco/repo/domain/hibernate/NodeStatusImpl.java 
b/source/java/org/alfresco/repo/domain/hibernate/NodeStatusImpl.java index 77ea062ebb..e5ceca5843 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/NodeStatusImpl.java +++ b/source/java/org/alfresco/repo/domain/hibernate/NodeStatusImpl.java @@ -21,6 +21,7 @@ import java.io.Serializable; import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.NodeKey; import org.alfresco.repo.domain.NodeStatus; +import org.alfresco.repo.domain.Transaction; import org.alfresco.util.EqualsHelper; /** @@ -34,15 +35,16 @@ public class NodeStatusImpl implements NodeStatus, Serializable private NodeKey key; private Node node; - private String changeTxnId; + private Transaction transaction; + @Override public String toString() { StringBuilder sb = new StringBuilder(50); sb.append("NodeStatus") .append("[key=").append(key) .append(", node=").append(node == null ? null : node.getNodeRef()) - .append(", txn=").append(changeTxnId) + .append(", txn=").append(transaction) .append("]"); return sb.toString(); } @@ -85,14 +87,14 @@ public class NodeStatusImpl implements NodeStatus, Serializable this.node = node; } - public String getChangeTxnId() + public Transaction getTransaction() { - return changeTxnId; + return transaction; } - public void setChangeTxnId(String txnId) + public void setTransaction(Transaction transaction) { - this.changeTxnId = txnId; + this.transaction = transaction; } public boolean isDeleted() diff --git a/source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java b/source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java new file mode 100644 index 0000000000..0fab53f8a5 --- /dev/null +++ b/source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2005 Alfresco, Inc. + * + * Licensed under the Mozilla Public License version 1.1 + * with a permitted attribution clause. You may obtain a + * copy of the License at + * + * http://www.alfresco.org/legal/license.txt + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific + * language governing permissions and limitations under the + * License. + */ +package org.alfresco.repo.domain.hibernate; + +import java.io.Serializable; + +import org.alfresco.repo.domain.Server; + +/** + * Bean containing all the persistence data representing a Server. + *

+ * This implementation of the {@link org.alfresco.repo.domain.Service Service} interface is + * Hibernate specific. + * + * @author Derek Hulley + */ +public class ServerImpl extends LifecycleAdapter implements Server, Serializable +{ + private static final long serialVersionUID = 8063452519040344479L; + + private Long id; + private String ipAddress; + + public ServerImpl() + { + } + + @Override + public String toString() + { + StringBuilder sb = new StringBuilder(50); + sb.append("Server") + .append("[id=").append(id) + .append(", ipAddress=").append(ipAddress) + .append("]"); + return sb.toString(); + } + + public Long getId() + { + return id; + } + + /** + * For Hibernate use + */ + @SuppressWarnings("unused") + private void setId(Long id) + { + this.id = id; + } + + public String getIpAddress() + { + return ipAddress; + } + + public void setIpAddress(String ipAddress) + { + this.ipAddress = ipAddress; + } +} diff --git a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml new file mode 100644 index 0000000000..e770f3f26c --- /dev/null +++ b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + select + server + from + org.alfresco.repo.domain.hibernate.ServerImpl as server + where + server.ipAddress = :ipAddress + + + diff --git a/source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java b/source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java new file mode 100644 index 0000000000..21a26946ea --- /dev/null +++ b/source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java @@ -0,0 +1,88 @@ +/* + * Copyright (C) 2005 Alfresco, Inc. + * + * Licensed under the Mozilla Public License version 1.1 + * with a permitted attribution clause. You may obtain a + * copy of the License at + * + * http://www.alfresco.org/legal/license.txt + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific + * language governing permissions and limitations under the + * License. + */ +package org.alfresco.repo.domain.hibernate; + +import java.io.Serializable; + +import org.alfresco.repo.domain.Server; +import org.alfresco.repo.domain.Transaction; + +/** + * Bean containing all the persistence data representing a Transaction. + *

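Only the HQL of the server.getServerByIpAddress query survives from the Transaction.hbm.xml hunk above, so the following is a hedged sketch of the kind of mapping the TransactionImpl and ServerImpl beans imply; the table and column names are assumptions, not the original values:

    <!-- Sketch only: table and column names are assumed -->
    <class name="org.alfresco.repo.domain.hibernate.TransactionImpl" table="alf_transaction">
       <id name="id" column="id" type="long">
          <generator class="native"/>
       </id>
       <property name="changeTxnId" column="change_txn_id" type="string" not-null="true"/>
       <many-to-one name="server" column="server_id" class="org.alfresco.repo.domain.hibernate.ServerImpl"/>
    </class>

    <class name="org.alfresco.repo.domain.hibernate.ServerImpl" table="alf_server">
       <id name="id" column="id" type="long">
          <generator class="native"/>
       </id>
       <property name="ipAddress" column="ip_address" type="string" not-null="true"/>
    </class>

    <query name="server.getServerByIpAddress">
       select
          server
       from
          org.alfresco.repo.domain.hibernate.ServerImpl as server
       where
          server.ipAddress = :ipAddress
    </query>

The Node.hbm.xml changes above follow the same split: the node status no longer carries its own changeTxnId string but joins to this shared transaction entity, and the transaction in turn points at the server that committed it.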
+ * This implementation of the {@link org.alfresco.repo.domain.Transaction Transaction} interface is + * Hibernate specific. + * + * @author Derek Hulley + */ +public class TransactionImpl extends LifecycleAdapter implements Transaction, Serializable +{ + private static final long serialVersionUID = -8264339795578077552L; + + private Long id; + private String changeTxnId; + private Server server; + + public TransactionImpl() + { + } + + @Override + public String toString() + { + StringBuilder sb = new StringBuilder(50); + sb.append("Transaction") + .append("[id=").append(id) + .append(", changeTxnId=").append(changeTxnId) + .append("]"); + return sb.toString(); + } + + public Long getId() + { + return id; + } + + /** + * For Hibernate use + */ + @SuppressWarnings("unused") + private void setId(Long id) + { + this.id = id; + } + + public String getChangeTxnId() + { + return changeTxnId; + } + + public void setChangeTxnId(String changeTransactionId) + { + this.changeTxnId = changeTransactionId; + } + + public Server getServer() + { + return server; + } + + public void setServer(Server server) + { + this.server = server; + } +} diff --git a/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java new file mode 100644 index 0000000000..e17a13e6ae --- /dev/null +++ b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java @@ -0,0 +1,529 @@ +/* + * Copyright (C) 2006 Alfresco, Inc. + * + * Licensed under the Mozilla Public License version 1.1 + * with a permitted attribution clause. You may obtain a + * copy of the License at + * + * http://www.alfresco.org/legal/license.txt + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific + * language governing permissions and limitations under the + * License. + */ +package org.alfresco.repo.domain.schema; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.InputStreamReader; +import java.io.Writer; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; + +import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.i18n.I18NUtil; +import org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch; +import org.alfresco.util.TempFileProvider; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.hibernate.Transaction; +import org.hibernate.cfg.Configuration; +import org.hibernate.dialect.Dialect; +import org.hibernate.tool.hbm2ddl.DatabaseMetadata; +import org.hibernate.tool.hbm2ddl.SchemaExport; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationListener; +import org.springframework.context.event.ContextRefreshedEvent; +import org.springframework.orm.hibernate3.LocalSessionFactoryBean; +import org.springframework.util.ResourceUtils; + +/** + * Bootstraps the schema and schema update. 
The schema is considered missing if the applied patch table + * is not present, and the schema is considered empty if the applied patch table is empty. + * + * @author Derek Hulley + */ +public class SchemaBootstrap implements ApplicationListener +{ + /** The placeholder for the configured Dialect class name: ${db.script.dialect} */ + private static final String PLACEHOLDER_SCRIPT_DIALECT = "\\$\\{db\\.script\\.dialect\\}"; + + private static final String MSG_EXECUTING_SCRIPT = "schema.update.msg.executing_script"; + private static final String ERR_UPDATE_FAILED = "schema.update.err.update_failed"; + private static final String ERR_VALIDATION_FAILED = "schema.update.err.validation_failed"; + private static final String ERR_SCRIPT_NOT_RUN = "schema.update.err.update_script_not_run"; + private static final String ERR_SCRIPT_NOT_FOUND = "schema.update.err.script_not_found"; + private static final String ERR_STATEMENT_TERMINATOR = "schema.update.err.statement_terminator"; + + private static Log logger = LogFactory.getLog(SchemaBootstrap.class); + + private LocalSessionFactoryBean localSessionFactory; + private String schemaOuputFilename; + private boolean updateSchema; + private List postCreateScriptUrls; + private List validateUpdateScriptPatches; + private List applyUpdateScriptPatches; + + public SchemaBootstrap() + { + postCreateScriptUrls = new ArrayList(1); + validateUpdateScriptPatches = new ArrayList(4); + applyUpdateScriptPatches = new ArrayList(4); + } + + public void setLocalSessionFactory(LocalSessionFactoryBean localSessionFactory) throws BeansException + { + this.localSessionFactory = localSessionFactory; + } + + /** + * Set this to output the full database creation script + * + * @param schemaOuputFilename the name of a file to dump the schema to, or null to ignore + */ + public void setSchemaOuputFilename(String schemaOuputFilename) + { + this.schemaOuputFilename = schemaOuputFilename; + } + + /** + * Set whether to modify the schema or not. Either way, the schema will be validated. + * + * @param updateSchema true to update and validate the schema, otherwise false to just + * validate the schema. Default is true. + */ + public void setUpdateSchema(boolean updateSchema) + { + this.updateSchema = updateSchema; + } + + /** + * Set the scripts that must be executed after the schema has been created. + * + * @param postCreateScriptUrls file URLs + * + * @see #PLACEHOLDER_SCRIPT_DIALECT + */ + public void setPostCreateScriptUrls(List postUpdateScriptUrls) + { + this.postCreateScriptUrls = postUpdateScriptUrls; + } + + /** + * Set the schema script patches that must have been applied. These will not be + * applied to the database. These can be used where the script cannot be + * applied automatically or where a particular upgrade path is no longer supported. + * For example, at version 3.0, the upgrade scripts for version 1.4 may be considered + * unsupported - this doesn't prevent the manual application of the scripts, though. + * + * @param applyUpdateScriptPatches a list of schema patches to check + */ + public void setValidateUpdateScriptPatches(List scriptPatches) + { + this.validateUpdateScriptPatches = scriptPatches; + } + + /** + * Set the schema script patches that may be executed during an update. 
+ * + * @param applyUpdateScriptPatches a list of schema patches to check + */ + public void setApplyUpdateScriptPatches(List scriptPatches) + { + this.applyUpdateScriptPatches = scriptPatches; + } + + public void onApplicationEvent(ApplicationEvent event) + { + if (!(event instanceof ContextRefreshedEvent)) + { + // only work on startup + return; + } + + // do everything in a transaction + Session session = getLocalSessionFactory().openSession(); + Transaction transaction = session.beginTransaction(); + try + { + // make sure that we don't autocommit + Connection connection = session.connection(); + connection.setAutoCommit(false); + + Configuration cfg = localSessionFactory.getConfiguration(); + // dump the schema, if required + if (schemaOuputFilename != null) + { + File schemaOutputFile = new File(schemaOuputFilename); + dumpSchemaCreate(cfg, schemaOutputFile); + } + + // update the schema, if required + if (updateSchema) + { + updateSchema(cfg, session, connection); + } + + // verify that all patches have been applied correctly + checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check scripts + checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, false); // check scripts + + // all done successfully + transaction.commit(); + } + catch (Throwable e) + { + try { transaction.rollback(); } catch (Throwable ee) {} + if (updateSchema) + { + throw new AlfrescoRuntimeException(ERR_UPDATE_FAILED, e); + } + else + { + throw new AlfrescoRuntimeException(ERR_VALIDATION_FAILED, e); + } + } + } + + private void dumpSchemaCreate(Configuration cfg, File schemaOutputFile) + { + // if the file exists, delete it + if (schemaOutputFile.exists()) + { + schemaOutputFile.delete(); + } + SchemaExport schemaExport = new SchemaExport(cfg) + .setFormat(true) + .setHaltOnError(true) + .setOutputFile(schemaOutputFile.getAbsolutePath()) + .setDelimiter(";"); + schemaExport.execute(false, false, false, true); + } + + private SessionFactory getLocalSessionFactory() + { + return (SessionFactory) localSessionFactory.getObject(); + } + + /** + * @return Returns the number of applied patches + */ + private int countAppliedPatches(Connection connection) throws Exception + { + Statement stmt = connection.createStatement(); + try + { + ResultSet rs = stmt.executeQuery("select count(id) from alf_applied_patch"); + rs.next(); + int count = rs.getInt(1); + return count; + } + catch (Throwable e) + { + // we'll try another table name + } + finally + { + try { stmt.close(); } catch (Throwable e) {} + } + // for pre-1.4 databases, the table was named differently + stmt = connection.createStatement(); + try + { + ResultSet rs = stmt.executeQuery("select count(id) from applied_patch"); + rs.next(); + int count = rs.getInt(1); + return count; + } + finally + { + try { stmt.close(); } catch (Throwable e) {} + } + } + + /** + * @return Returns the number of applied patches + */ + private boolean didPatchSucceed(Connection connection, String patchId) throws Exception + { + Statement stmt = connection.createStatement(); + try + { + ResultSet rs = stmt.executeQuery("select succeeded from alf_applied_patch where id = '" + patchId + "'"); + if (!rs.next()) + { + return false; + } + boolean succeeded = rs.getBoolean(1); + return succeeded; + } + catch (Throwable e) + { + // we'll try another table name + } + finally + { + try { stmt.close(); } catch (Throwable e) {} + } + // for pre-1.4 databases, the table was named differently + stmt = connection.createStatement(); + try + { 
+ ResultSet rs = stmt.executeQuery("select succeeded from applied_patch where id = '" + patchId + "'"); + if (!rs.next()) + { + return false; + } + boolean succeeded = rs.getBoolean(1); + return succeeded; + } + finally + { + try { stmt.close(); } catch (Throwable e) {} + } + } + + /** + * Builds the schema from scratch or applies the necessary patches to the schema. + */ + private void updateSchema(Configuration cfg, Session session, Connection connection) throws Exception + { + boolean create = false; + try + { + countAppliedPatches(connection); + } + catch (Throwable e) + { + create = true; + } + if (create) + { + // the applied patch table is missing - we assume that all other tables are missing + // perform a full update using Hibernate-generated statements + File tempFile = TempFileProvider.createTempFile("AlfrescoSchemaCreate", ".sql"); + dumpSchemaCreate(cfg, tempFile); + executeScriptFile(cfg, connection, tempFile); + // execute post-create scripts (not patches) + for (String scriptUrl : this.postCreateScriptUrls) + { + executeScriptUrl(cfg, connection, scriptUrl); + } + } + else + { + // we have a database, so just run the update scripts + checkSchemaPatchScripts(cfg, session, connection, validateUpdateScriptPatches, false); // check for scripts that must have been run + checkSchemaPatchScripts(cfg, session, connection, applyUpdateScriptPatches, true); // execute scripts as required + // let Hibernate do any required updates + File tempFile = null; + Writer writer = null; + try + { + final Dialect dialect = Dialect.getDialect(cfg.getProperties()); + DatabaseMetadata metadata = new DatabaseMetadata(connection, dialect); + String[] sqls = cfg.generateSchemaUpdateScript(dialect, metadata); + if (sqls.length > 0) + { + tempFile = TempFileProvider.createTempFile("AlfrescoSchemaUpdate", ".sql"); + writer = new BufferedWriter(new FileWriter(tempFile)); + for (String sql : sqls) + { + writer.append(sql); + writer.append(";\n"); + } + } + } + finally + { + if (writer != null) + { + try {writer.close();} catch (Throwable e) {} + } + } + // execute if there were changes raised by Hibernate + if (tempFile != null) + { + executeScriptFile(cfg, connection, tempFile); + } + } + } + + /** + * Check that the necessary scripts have been executed against the database + */ + private void checkSchemaPatchScripts( + Configuration cfg, + Session session, + Connection connection, + List scriptPatches, + boolean apply) throws Exception + { + // first check if there have been any applied patches + int appliedPatchCount = countAppliedPatches(connection); + if (appliedPatchCount == 0) + { + // This is a new schema, so upgrade scripts are irrelevant + // and patches will not have been applied yet + return; + } + + for (SchemaUpgradeScriptPatch patch : scriptPatches) + { + final String patchId = patch.getId(); + final String scriptUrl = patch.getScriptUrl(); + + // check if the script was successfully executed + boolean wasSuccessfullyApplied = didPatchSucceed(connection, patchId); + if (wasSuccessfullyApplied) + { + // nothing to do - it has been done before + continue; + } + else if (!apply) + { + // the script was not run and may not be run automatically + throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_RUN, scriptUrl); + } + // it wasn't run and it can be run now + executeScriptUrl(cfg, connection, scriptUrl); + } + } + + private void executeScriptUrl(Configuration cfg, Connection connection, String scriptUrl) throws Exception + { + Dialect dialect = Dialect.getDialect(cfg.getProperties()); + File 
scriptFile = getScriptFile(dialect.getClass(), scriptUrl); + // check that it exists + if (scriptFile == null) + { + throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_FOUND, scriptUrl); + } + // now execute it + executeScriptFile(cfg, connection, scriptFile); + } + + /** + * Replaces the dialect placeholder in the script URL and attempts to find a file for + * it. If not found, the dialect hierarchy will be walked until a compatible script is + * found. This makes it possible to have scripts that are generic to all dialects. + * + * @return Returns the file if found, otherwise null + */ + private File getScriptFile(Class dialectClazz, String scriptUrl) throws Exception + { + // replace the dialect placeholder + String dialectScriptUrl = scriptUrl.replaceAll(PLACEHOLDER_SCRIPT_DIALECT, dialectClazz.getName()); + // get a handle on the resource + try + { + File scriptFile = ResourceUtils.getFile(dialectScriptUrl); + if (scriptFile.exists()) + { + // found a compatible dialect version + return scriptFile; + } + } + catch (FileNotFoundException e) + { + // doesn't exist + } + // it wasn't found. Get the superclass of the dialect and try again + Class superClazz = dialectClazz.getSuperclass(); + if (Dialect.class.isAssignableFrom(superClazz)) + { + // we still have a Dialect - try again + return getScriptFile(superClazz, scriptUrl); + } + else + { + // we have exhausted all options + return null; + } + } + + private void executeScriptFile(Configuration cfg, Connection connection, File scriptFile) throws Exception + { + logger.info(I18NUtil.getMessage(MSG_EXECUTING_SCRIPT, scriptFile)); + + BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(scriptFile), "UTF8")); + try + { + int line = 0; + // loop through all statements + StringBuilder sb = new StringBuilder(1024); + while(true) + { + String sql = reader.readLine(); + line++; + + if (sql == null) + { + // nothing left in the file + break; + } + + // trim it + sql = sql.trim(); + if (sql.length() == 0 || + sql.startsWith( "--" ) || + sql.startsWith( "//" ) || + sql.startsWith( "/*" ) ) + { + if (sb.length() > 0) + { + // we have an unterminated statement + throw AlfrescoRuntimeException.create(ERR_STATEMENT_TERMINATOR, (line - 1), scriptFile); + } + // there has not been anything to execute - it's just a comment line + continue; + } + // have we reached the end of a statement? 
+ boolean execute = false; + if (sql.endsWith(";")) + { + sql = sql.substring(0, sql.length() - 1); + execute = true; + } + // append to the statement being built up + sb.append(" ").append(sql); + // execute, if required + if (execute) + { + Statement stmt = connection.createStatement(); + try + { + sql = sb.toString(); + if (logger.isDebugEnabled()) + { + logger.debug("Executing statment: " + sql); + } + stmt.execute(sql); + sb = new StringBuilder(1024); + } + finally + { + try { stmt.close(); } catch (Throwable e) {} + } + } + } + } + finally + { + try { reader.close(); } catch (Throwable e) {} + } + } +} diff --git a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java index 0a5709f410..f6de57d293 100644 --- a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java +++ b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java @@ -1630,10 +1630,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest NodeRef defRef = pathDefRef.getChildRef(); // now browse down using the node service - NodeRef checkParentRef = nodeService.getChildByName(rootNodeRef, ASSOC_TYPE_QNAME_TEST_CHILDREN, parentRef.getId()); - assertNotNull("First level, non-named node not found", checkParentRef); - assertEquals(parentRef, checkParentRef); - NodeRef checkAbcRef = nodeService.getChildByName(checkParentRef, ASSOC_TYPE_QNAME_TEST_CONTAINS, "abc"); + NodeRef checkAbcRef = nodeService.getChildByName(parentRef, ASSOC_TYPE_QNAME_TEST_CONTAINS, "abc"); assertNotNull("Second level, named node 'ABC' not found", checkAbcRef); assertEquals(abcRef, checkAbcRef); NodeRef checkDefRef = nodeService.getChildByName(checkAbcRef, ASSOC_TYPE_QNAME_TEST_CONTAINS, "def"); diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java index 04cdfacfb8..6a9f0993ca 100644 --- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java +++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java @@ -148,7 +148,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl else { return new NodeRef.Status( - nodeStatus.getChangeTxnId(), + nodeStatus.getTransaction().getChangeTxnId(), nodeStatus.isDeleted()); } } @@ -1446,11 +1446,11 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl // update old status NodeStatus oldNodeStatus = nodeDaoService.getNodeStatus(oldNodeRef, true); oldNodeStatus.setNode(null); - oldNodeStatus.setChangeTxnId(txnId); + oldNodeStatus.getTransaction().setChangeTxnId(txnId); // create the new status NodeStatus newNodeStatus = nodeDaoService.getNodeStatus(newNodeRef, true); newNodeStatus.setNode(nodeToMove); - newNodeStatus.setChangeTxnId(txnId); + newNodeStatus.getTransaction().setChangeTxnId(txnId); } } diff --git a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java index 6ecccd65d1..b2872cdeaf 100644 --- a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java +++ b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java @@ -16,9 +16,14 @@ */ package org.alfresco.repo.node.db.hibernate; +import java.io.Serializable; +import java.net.InetAddress; import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; +import 
java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; import java.util.zip.CRC32; import org.alfresco.error.AlfrescoRuntimeException; @@ -28,13 +33,17 @@ import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.NodeAssoc; import org.alfresco.repo.domain.NodeKey; import org.alfresco.repo.domain.NodeStatus; +import org.alfresco.repo.domain.Server; import org.alfresco.repo.domain.Store; import org.alfresco.repo.domain.StoreKey; +import org.alfresco.repo.domain.Transaction; import org.alfresco.repo.domain.hibernate.ChildAssocImpl; import org.alfresco.repo.domain.hibernate.NodeAssocImpl; import org.alfresco.repo.domain.hibernate.NodeImpl; import org.alfresco.repo.domain.hibernate.NodeStatusImpl; +import org.alfresco.repo.domain.hibernate.ServerImpl; import org.alfresco.repo.domain.hibernate.StoreImpl; +import org.alfresco.repo.domain.hibernate.TransactionImpl; import org.alfresco.repo.node.db.NodeDaoService; import org.alfresco.repo.transaction.AlfrescoTransactionSupport; import org.alfresco.repo.transaction.TransactionalDao; @@ -71,9 +80,14 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements private static final String QUERY_GET_TARGET_ASSOCS = "node.GetTargetAssocs"; private static final String QUERY_GET_SOURCE_ASSOCS = "node.GetSourceAssocs"; private static final String QUERY_GET_CONTENT_DATA_STRINGS = "node.GetContentDataStrings"; + private static final String QUERY_GET_SERVER_BY_IPADDRESS = "server.getServerByIpAddress"; /** a uuid identifying this unique instance */ - private String uuid; + private final String uuid; + + private final ReadLock serverReadLock; + private final WriteLock serverWriteLock; + private Server server; /** * @@ -81,6 +95,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements public HibernateNodeDaoServiceImpl() { this.uuid = GUID.generate(); + + ReentrantReadWriteLock serverReadWriteLock = new ReentrantReadWriteLock(); + serverReadLock = serverReadWriteLock.readLock(); + serverWriteLock = serverReadWriteLock.writeLock(); } /** @@ -108,6 +126,93 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements return uuid.hashCode(); } + /** + * Gets/creates the server instance to use for the life of this instance + */ + private Server getServer() + { + // get readlock + serverReadLock.lock(); + try + { + if (server != null) + { + return server; + } + } + finally + { + serverReadLock.unlock(); + } + // get the write lock + serverWriteLock.lock(); + try + { + final String ipAddress = InetAddress.getLocalHost().getHostAddress(); + HibernateCallback callback = new HibernateCallback() + { + public Object doInHibernate(Session session) + { + Query query = session + .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_SERVER_BY_IPADDRESS) + .setString("ipAddress", ipAddress); + return query.uniqueResult(); + } + }; + server = (Server) getHibernateTemplate().execute(callback); + // create it if it doesn't exist + if (server == null) + { + server = new ServerImpl(); + server.setIpAddress(ipAddress); + try + { + getSession().save(server); + } + catch (DataIntegrityViolationException e) + { + // get it again + server = (Server) getHibernateTemplate().execute(callback); + if (server == null) + { + throw new AlfrescoRuntimeException("Unable to create server instance: " + ipAddress); + } + } + } + return server; + } + catch (Exception e) + { + throw new AlfrescoRuntimeException("Failed to create server instance", e); + } + finally + { + serverWriteLock.unlock(); + } + } + + private 
static final String RESOURCE_KEY_TRANSACTION_ID = "hibernate.transaction.id"; + private Transaction getCurrentTransaction() + { + Transaction transaction = null; + Serializable txnId = (Serializable) AlfrescoTransactionSupport.getResource(RESOURCE_KEY_TRANSACTION_ID); + if (txnId == null) + { + // no transaction instance has been bound to the transaction + transaction = new TransactionImpl(); + transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + transaction.setServer(getServer()); + txnId = getHibernateTemplate().save(transaction); + // bind the id + AlfrescoTransactionSupport.bindResource(RESOURCE_KEY_TRANSACTION_ID, txnId); + } + else + { + transaction = (Transaction) getHibernateTemplate().get(TransactionImpl.class, txnId); + } + return transaction; + } + /** * Does this Session contain any changes which must be * synchronized with the store? @@ -218,7 +323,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements { status = new NodeStatusImpl(); status.setKey(nodeKey); - status.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + status.setTransaction(getCurrentTransaction()); getHibernateTemplate().save(status); } // done @@ -237,7 +342,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements } else { - status.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + status.getTransaction().setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); } } @@ -259,13 +364,13 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements // If that is the case, then the session has to be flushed so that the database // constraints aren't violated as the node creation will write to the database to // get an ID - if (status.getChangeTxnId().equals(AlfrescoTransactionSupport.getTransactionId())) + if (status.getTransaction().getChangeTxnId().equals(AlfrescoTransactionSupport.getTransactionId())) { // flush getHibernateTemplate().flush(); } } - + // build a concrete node based on a bootstrap type Node node = new NodeImpl(); // set other required properties @@ -277,7 +382,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements // set required status properties status.setNode(node); - status.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + // assign a transaction + if (status.getTransaction() == null) + { + status.setTransaction(getCurrentTransaction()); + } // persist the nodestatus getHibernateTemplate().save(status); @@ -331,7 +440,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements NodeRef nodeRef = node.getNodeRef(); NodeStatus nodeStatus = getNodeStatus(nodeRef, true); nodeStatus.setNode(null); - nodeStatus.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + nodeStatus.getTransaction().setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); // finally delete the node getHibernateTemplate().delete(node); // flush to ensure constraints can't be violated @@ -371,7 +480,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements { /* * This initial child association creation will fail IFF there is already - * an association of the given type between the two nodes. For new association + * an association of the given type and name between the two nodes. For new association * creation, this can only occur if two transactions attempt to create a secondary * child association between the same two nodes. 
As this is unlikely, it is * appropriate to just throw a runtime exception and let the second transaction @@ -383,28 +492,18 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements * if the association is recreated subsequently. */ - String uuid = childNode.getUuid(); + // assign a random name to the node + String randomName = GUID.generate(); ChildAssoc assoc = new ChildAssocImpl(); assoc.setTypeQName(assocTypeQName); - assoc.setChildNodeName(getShortName(uuid)); - assoc.setChildNodeNameCrc(getCrc(uuid)); + assoc.setChildNodeName(randomName); + assoc.setChildNodeNameCrc(-1L); // random names compete only with each other assoc.setQname(qname); assoc.setIsPrimary(isPrimary); assoc.buildAssociation(parentNode, childNode); // persist it, catching the duplicate child name - try - { - getHibernateTemplate().save(assoc); - } - catch (DataIntegrityViolationException e) - { - throw new AlfrescoRuntimeException("An association already exists between the two nodes: \n" + - " parent: " + parentNode.getId() + "\n" + - " child: " + childNode.getId() + "\n" + - " assoc: " + assocTypeQName, - e); - } + getHibernateTemplate().save(assoc); // done return assoc; } @@ -422,17 +521,22 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements */ String childNameNew = null; + long crc = -1; if (childName == null) { - childNameNew = childAssoc.getChild().getUuid(); + // random names compete only with each other, i.e. not at all + childNameNew = GUID.generate(); + crc = -1; } else { + // assigned names compete exactly childNameNew = childName.toLowerCase(); + crc = getCrc(childNameNew); } final String childNameNewShort = getShortName(childNameNew); - final long childNameNewCrc = getCrc(childNameNew); + final long childNameNewCrc = crc; // check if the name has changed if (childAssoc.getChildNodeNameCrc() == childNameNewCrc) diff --git a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java index 3000634953..54fb4c460b 100644 --- a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java +++ b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java @@ -62,13 +62,13 @@ import org.springframework.orm.hibernate3.support.HibernateDaoSupport; * database is static then the L2 cache usage can be set to use * the NORMAL mode. REFRESH should be * used where the server will still be accessed from some clients - * despite the database changing. + * despite the database changing. NORMAL can be used + * in the case of the caches being clustered, i.e. the caches will + * not be out of date w.r.t. the database. * *

  • - * This process should not run continuously on a live - * server as it would be performing unecessary work. - * If it was left running, however, it would not - * lead to data corruption or such-like. Use the + * This process should only be used continuously where the index + * transactions are following the database transactions. Use the * {@link #setRunContinuously(boolean) runContinuously} property * to change this behaviour. *
  • @@ -91,7 +91,7 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I private static boolean started = false; /** The current transaction ID being processed */ private static String currentTxnId = START_TXN_ID; - /** kept to notify the thread that it should quite */ + /** kept to notify the thread that it should quit */ private boolean killThread = false; /** provides transactions to atomically index each missed transaction */ @@ -104,8 +104,6 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I private SearchService searcher; /** the component giving direct access to node instances */ private NodeService nodeService; - /** the stores to reindex */ - private List storeRefs; /** set this to run the index recovery component */ private boolean executeFullRecovery; /** set this on to keep checking for new transactions and never stop */ @@ -125,8 +123,6 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I public FullIndexRecoveryComponent() { - this.storeRefs = new ArrayList(2); - this.killThread = false; this.executeFullRecovery = false; this.runContinuously = false; @@ -193,21 +189,6 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I this.nodeService = nodeService; } - /** - * Set the stores that need reindexing - * - * @param storeRefStrings a list of strings representing store references - */ - public void setStores(List storeRefStrings) - { - storeRefs.clear(); - for (String storeRefStr : storeRefStrings) - { - StoreRef storeRef = new StoreRef(storeRefStr); - storeRefs.add(storeRef); - } - } - /** * Set this to true to initiate the full index recovery. *

    @@ -299,6 +280,7 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I { public Object doWork() { + List storeRefs = nodeService.getStores(); // reindex each store for (StoreRef storeRef : storeRefs) { @@ -352,8 +334,7 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I if (logger.isDebugEnabled()) { logger.debug("Full index recovery thread started: \n" + - " continuous: " + runContinuously + "\n" + - " stores: " + storeRefs); + " continuous: " + runContinuously); } } } @@ -377,8 +358,8 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I // reindex nodes List txnsIndexed = FullIndexRecoveryComponent.this.reindexNodes(); // reindex missing content - @SuppressWarnings("unused") - int missingContentCount = FullIndexRecoveryComponent.this.reindexMissingContent(); +// @SuppressWarnings("unused") +// int missingContentCount = FullIndexRecoveryComponent.this.reindexMissingContent(); // check if the process should terminate if (txnsIndexed.size() == 0 && !runContinuously) { @@ -417,73 +398,6 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I } } - /** - * @return Returns the number of documents reindexed - */ - private int reindexMissingContent() - { - int count = 0; - for (StoreRef storeRef : storeRefs) - { - count += reindexMissingContent(storeRef); - } - return count; - } - - /** - * @param storeRef the store to check for missing content - * @return Returns the number of documents reindexed - */ - private int reindexMissingContent(StoreRef storeRef) - { - SearchParameters sp = new SearchParameters(); - sp.addStore(storeRef); - - // search for it in the index - String query = "TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING; - sp.setLanguage(SearchService.LANGUAGE_LUCENE); - sp.setQuery(query); - ResultSet results = null; - try - { - results = searcher.query(sp); - - int count = 0; - // loop over the results and get the details of the nodes that have missing content - List assocRefs = results.getChildAssocRefs(); - for (ChildAssociationRef assocRef : assocRefs) - { - final NodeRef childNodeRef = assocRef.getChildRef(); - // prompt for a reindex - it might fail again, but we just keep plugging away - TransactionWork reindexWork = new TransactionWork() - { - public Object doWork() - { - indexer.updateNode(childNodeRef); - return null; - } - }; - TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork); - count++; - } - // done - if (logger.isDebugEnabled()) - { - logger.debug("Reindexed missing content: \n" + - " store: " + storeRef + "\n" + - " node count: " + count); - } - return count; - } - finally - { - if (results != null) - { - results.close(); - } - } - } - /** * @return Returns the transaction ID just reindexed, i.e. 
where some work was performed */ @@ -572,16 +486,16 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I getSession().setCacheMode(l2CacheMode); // reindex each store - for (StoreRef storeRef : storeRefs) - { - if (!nodeService.exists(storeRef)) - { - // the store is not present - continue; - } - // reindex for store - reindexNodes(storeRef, changeTxnId); - } +// for (StoreRef storeRef : storeRefs) +// { +// if (!nodeService.exists(storeRef)) +// { +// // the store is not present +// continue; +// } +// // reindex for store +// reindexNodes(storeRef, changeTxnId); +// } // done return null; } @@ -675,10 +589,10 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I }; /** - * Retrieve all transaction IDs that are greater than the given transaction ID. + * Retrieve next 50 transaction IDs that are greater than the given transaction ID. * * @param currentTxnId the transaction ID that must be less than all returned results - * @return Returns an ordered list of transaction IDs + * @return Returns an ordered list of the next 50 transaction IDs */ @SuppressWarnings("unchecked") public List getNextChangeTxnIds(final String currentTxnId) @@ -689,6 +603,7 @@ public class FullIndexRecoveryComponent extends HibernateDaoSupport implements I { Query query = session.getNamedQuery(QUERY_GET_NEXT_CHANGE_TXN_IDS); query.setString("currentTxnId", currentTxnId) + .setMaxResults(50) .setReadOnly(true); return query.list(); } diff --git a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponentTest.java b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponentTest.java index d39df7a8fc..bfd8e4a0ab 100644 --- a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponentTest.java +++ b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponentTest.java @@ -123,7 +123,7 @@ public class FullIndexRecoveryComponentTest extends TestCase String txnId = TransactionUtil.executeInNonPropagatingUserTransaction(txnService, dropNodeIndexWork); indexRecoverer.setExecuteFullRecovery(true); - indexRecoverer.setStores(storeRefStrings); +// indexRecoverer.setStores(storeRefStrings); // reindex indexRecoverer.reindex(); diff --git a/source/java/org/alfresco/repo/node/index/MissingContentReindexComponent.java b/source/java/org/alfresco/repo/node/index/MissingContentReindexComponent.java new file mode 100644 index 0000000000..2f1b115e62 --- /dev/null +++ b/source/java/org/alfresco/repo/node/index/MissingContentReindexComponent.java @@ -0,0 +1,741 @@ +///* +// * Copyright (C) 2005-2006 Alfresco, Inc. +// * +// * Licensed under the Mozilla Public License version 1.1 +// * with a permitted attribution clause. You may obtain a +// * copy of the License at +// * +// * http://www.alfresco.org/legal/license.txt +// * +// * Unless required by applicable law or agreed to in writing, +// * software distributed under the License is distributed on an +// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +// * either express or implied. See the License for the specific +// * language governing permissions and limitations under the +// * License. 
+// */ +//package org.alfresco.repo.node.index; +// +//import java.util.ArrayList; +//import java.util.List; +// +//import org.alfresco.error.AlfrescoRuntimeException; +//import org.alfresco.model.ContentModel; +//import org.alfresco.repo.domain.NodeStatus; +//import org.alfresco.repo.search.Indexer; +//import org.alfresco.repo.search.impl.lucene.LuceneIndexerImpl; +//import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer; +//import org.alfresco.repo.transaction.TransactionUtil; +//import org.alfresco.repo.transaction.TransactionUtil.TransactionWork; +//import org.alfresco.service.cmr.repository.ChildAssociationRef; +//import org.alfresco.service.cmr.repository.NodeRef; +//import org.alfresco.service.cmr.repository.NodeService; +//import org.alfresco.service.cmr.repository.StoreRef; +//import org.alfresco.service.cmr.search.ResultSet; +//import org.alfresco.service.cmr.search.SearchParameters; +//import org.alfresco.service.cmr.search.SearchService; +//import org.alfresco.service.transaction.TransactionService; +//import org.apache.commons.logging.Log; +//import org.apache.commons.logging.LogFactory; +//import org.hibernate.CacheMode; +//import org.hibernate.Query; +//import org.hibernate.Session; +//import org.springframework.orm.hibernate3.HibernateCallback; +//import org.springframework.orm.hibernate3.support.HibernateDaoSupport; +// +///** +// * Ensures that the FTS indexing picks up on any outstanding documents that +// * require indexing. +// *

    +// * This component must be used as a singleton (one per VM) and may only be +// * called to reindex once. It will start a thread that processes all available +// * transactions and keeps checking to ensure that the index is up to date with +// * the latest database changes. +// *

    +// * The following points are important: +// *

      +// *
    • +// * By default, the Hibernate L2 cache is used during processing. +// * This can be disabled by either disabling the L2 cache globally +// * for the server (not recommended) or by setting the +// * {@link #setL2CacheMode(String) l2CacheMode} property. If the +// * database is static then the L2 cache usage can be set to use +// * the NORMAL mode. REFRESH should be +// * used where the server will still be accessed from some clients +// * despite the database changing. NORMAL can be used +// * in the case of the caches being clustered, i.e. the caches will +// * not be out of date w.r.t. the database. +// *
    • +// *
    • +// * This process should only be used continuously where the index +// * transactions are following the database transactions. Use the +// * {@link #setRunContinuously(boolean) runContinuously} property +// * to change this behaviour. +// *
    • +// *
    +// * +// * @author Derek Hulley +// */ +//public class MissingContentReindexComponent extends HibernateDaoSupport implements IndexRecovery +//{ +// public static final String QUERY_GET_NEXT_CHANGE_TXN_IDS = "node.GetNextChangeTxnIds"; +// public static final String QUERY_GET_CHANGED_NODE_STATUSES = "node.GetChangedNodeStatuses"; +// public static final String QUERY_GET_DELETED_NODE_STATUSES = "node.GetDeletedNodeStatuses"; +// public static final String QUERY_GET_CHANGED_NODE_STATUSES_COUNT = "node.GetChangedNodeStatusesCount"; +// +// private static final String START_TXN_ID = "000"; +// +// private static Log logger = LogFactory.getLog(FullIndexRecoveryComponent.class); +// +// /** ensures that this process is kicked off once per VM */ +// private static boolean started = false; +// /** The current transaction ID being processed */ +// private static String currentTxnId = START_TXN_ID; +// /** kept to notify the thread that it should quite */ +// private boolean killThread = false; +// +// /** provides transactions to atomically index each missed transaction */ +// private TransactionService transactionService; +// /** the component to index the node hierarchy */ +// private Indexer indexer; +// /** the FTS indexer that we will prompt to pick up on any un-indexed text */ +// private FullTextSearchIndexer ftsIndexer; +// /** the component providing searches of the indexed nodes */ +// private SearchService searcher; +// /** the component giving direct access to node instances */ +// private NodeService nodeService; +// /** set this to run the index recovery component */ +// private boolean executeFullRecovery; +// /** set this on to keep checking for new transactions and never stop */ +// private boolean runContinuously; +// /** set the time to wait between checking indexes */ +// private long waitTime; +// /** controls how the L2 cache is used */ +// private CacheMode l2CacheMode; +// +// /** +// * @return Returns the ID of the current (or last) transaction processed +// */ +// public static String getCurrentTransactionId() +// { +// return currentTxnId; +// } +// +// public FullIndexRecoveryComponent() +// { +// this.killThread = false; +// this.executeFullRecovery = false; +// this.runContinuously = false; +// this.waitTime = 1000L; +// this.l2CacheMode = CacheMode.REFRESH; +// +// // ensure that we kill the thread when the VM is shutting down +// Runnable shutdownRunnable = new Runnable() +// { +// public void run() +// { +// killThread = true; +// }; +// }; +// Thread shutdownThread = new Thread(shutdownRunnable); +// Runtime.getRuntime().addShutdownHook(shutdownThread); +// } +// +// /** +// * @return Returns true if the component has already been started +// */ +// public static boolean isStarted() +// { +// return started; +// } +// +// /** +// * @param transactionService provide transactions to index each missed transaction +// */ +// public void setTransactionService(TransactionService transactionService) +// { +// this.transactionService = transactionService; +// } +// +// /** +// * @param indexer the indexer that will be index +// */ +// public void setIndexer(Indexer indexer) +// { +// this.indexer = indexer; +// } +// +// /** +// * @param ftsIndexer the FTS background indexer +// */ +// public void setFtsIndexer(FullTextSearchIndexer ftsIndexer) +// { +// this.ftsIndexer = ftsIndexer; +// } +// +// /** +// * @param searcher component providing index searches +// */ +// public void setSearcher(SearchService searcher) +// { +// this.searcher = searcher; +// } +// +// /** 
+// * @param nodeService provides information about nodes for indexing +// */ +// public void setNodeService(NodeService nodeService) +// { +// this.nodeService = nodeService; +// } +// +// /** +// * Set this to true to initiate the full index recovery. +// *

    +// * This used to default to true but is now false. Set this +// * if the potentially long-running process of checking and fixing the +// * indexes must be started. +// * +// * @param executeFullRecovery +// */ +// public void setExecuteFullRecovery(boolean executeFullRecovery) +// { +// this.executeFullRecovery = executeFullRecovery; +// } +// +// /** +// * Set this to ensure that the process continuously checks for new transactions. +// * If not, it will permanently terminate once it catches up with the current +// * transactions. +// * +// * @param runContinuously true to never cease looking for new transactions +// */ +// public void setRunContinuously(boolean runContinuously) +// { +// this.runContinuously = runContinuously; +// } +// +// /** +// * Set the time to wait between checking for new transaction changes in the database. +// * +// * @param waitTime the time to wait in milliseconds +// */ +// public void setWaitTime(long waitTime) +// { +// this.waitTime = waitTime; +// } +// +// /** +// * Set the hibernate cache mode by name +// * +// * @see org.hibernate.CacheMode +// */ +// public void setL2CacheMode(String l2CacheModeStr) +// { +// if (l2CacheModeStr.equals("GET")) +// { +// l2CacheMode = CacheMode.GET; +// } +// else if (l2CacheModeStr.equals("IGNORE")) +// { +// l2CacheMode = CacheMode.IGNORE; +// } +// else if (l2CacheModeStr.equals("NORMAL")) +// { +// l2CacheMode = CacheMode.NORMAL; +// } +// else if (l2CacheModeStr.equals("PUT")) +// { +// l2CacheMode = CacheMode.PUT; +// } +// else if (l2CacheModeStr.equals("REFRESH")) +// { +// l2CacheMode = CacheMode.REFRESH; +// } +// else +// { +// throw new IllegalArgumentException("Unrecognised Hibernate L2 cache mode: " + l2CacheModeStr); +// } +// } +// +// /** +// * Ensure that the index is up to date with the current state of the persistence layer. +// * The full list of unique transaction change IDs is retrieved and used to detect +// * which are not present in the index. All the node changes and deletions for the +// * remaining transactions are then indexed. 
+// */ +// public synchronized void reindex() +// { +// if (FullIndexRecoveryComponent.started) +// { +// throw new AlfrescoRuntimeException +// ("Only one FullIndexRecoveryComponent may be used per VM and it may only be called once"); +// } +// +// // ensure that we don't redo this work +// FullIndexRecoveryComponent.started = true; +// +// // work to mark the stores for full text reindexing +// TransactionWork ftsReindexWork = new TransactionWork() +// { +// public Object doWork() +// { +// List storeRefs = nodeService.getStores(); +// // reindex each store +// for (StoreRef storeRef : storeRefs) +// { +// // check if the store exists +// if (!nodeService.exists(storeRef)) +// { +// // store does not exist +// if (logger.isDebugEnabled()) +// { +// logger.debug("Skipping reindex of non-existent store: " + storeRef); +// } +// continue; +// } +// +// // prompt FTS to reindex the store +// ftsIndexer.requiresIndex(storeRef); +// } +// // done +// if (logger.isDebugEnabled()) +// { +// logger.debug("Prompted FTS index on stores: " + storeRefs); +// } +// return null; +// } +// }; +// TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, ftsReindexWork); +// +// // start full index recovery, if necessary +// if (!this.executeFullRecovery) +// { +// if (logger.isDebugEnabled()) +// { +// logger.debug("Full index recovery is off - quitting"); +// } +// } +// else +// { +// // set the state of the reindex +// FullIndexRecoveryComponent.currentTxnId = START_TXN_ID; +// +// // start a stateful thread that will begin processing the reindexing the transactions +// Runnable runnable = new ReindexRunner(); +// Thread reindexThread = new Thread(runnable); +// // make it a daemon thread +// reindexThread.setDaemon(true); +// // it should not be a high priority +// reindexThread.setPriority(Thread.MIN_PRIORITY); +// // start it +// reindexThread.start(); +// +// if (logger.isDebugEnabled()) +// { +// logger.debug("Full index recovery thread started: \n" + +// " continuous: " + runContinuously); +// } +// } +// } +// +// /** +// * Stateful thread runnable that executes reindex calls. 
+// * +// * @see FullIndexRecoveryComponent#reindexNodes() +// * +// * @author Derek Hulley +// */ +// private class ReindexRunner implements Runnable +// { +// public void run() +// { +// // keep this thread going permanently +// while (!killThread) +// { +// try +// { +// // reindex nodes +// List txnsIndexed = FullIndexRecoveryComponent.this.reindexNodes(); +// // reindex missing content +// @SuppressWarnings("unused") +// int missingContentCount = FullIndexRecoveryComponent.this.reindexMissingContent(); +// // check if the process should terminate +// if (txnsIndexed.size() == 0 && !runContinuously) +// { +// // the thread has caught up with all the available work and should not +// // run continuously +// if (logger.isDebugEnabled()) +// { +// logger.debug("Thread quitting - no more available indexing to do: \n" + +// " last txn: " + FullIndexRecoveryComponent.getCurrentTransactionId()); +// } +// break; +// } +// // brief pause +// synchronized(FullIndexRecoveryComponent.this) +// { +// FullIndexRecoveryComponent.this.wait(waitTime); +// } +// } +// catch (InterruptedException e) +// { +// // ignore +// } +// catch (Throwable e) +// { +// if (killThread) +// { +// // the shutdown may have caused the exception - ignore it +// } +// else +// { +// // we are still a go; report it +// logger.error("Reindex failure", e); +// } +// } +// } +// } +// } +// +// /** +// * @return Returns the number of documents reindexed +// */ +// private int reindexMissingContent() +// { +// int count = 0; +// for (StoreRef storeRef : storeRefs) +// { +// count += reindexMissingContent(storeRef); +// } +// return count; +// } +// +// /** +// * @param storeRef the store to check for missing content +// * @return Returns the number of documents reindexed +// */ +// private int reindexMissingContent(StoreRef storeRef) +// { +// SearchParameters sp = new SearchParameters(); +// sp.addStore(storeRef); +// +// // search for it in the index +// String query = "TEXT:" + LuceneIndexerImpl.NOT_INDEXED_CONTENT_MISSING; +// sp.setLanguage(SearchService.LANGUAGE_LUCENE); +// sp.setQuery(query); +// ResultSet results = null; +// try +// { +// results = searcher.query(sp); +// +// int count = 0; +// // loop over the results and get the details of the nodes that have missing content +// List assocRefs = results.getChildAssocRefs(); +// for (ChildAssociationRef assocRef : assocRefs) +// { +// final NodeRef childNodeRef = assocRef.getChildRef(); +// // prompt for a reindex - it might fail again, but we just keep plugging away +// TransactionWork reindexWork = new TransactionWork() +// { +// public Object doWork() +// { +// indexer.updateNode(childNodeRef); +// return null; +// } +// }; +// TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork); +// count++; +// } +// // done +// if (logger.isDebugEnabled()) +// { +// logger.debug("Reindexed missing content: \n" + +// " store: " + storeRef + "\n" + +// " node count: " + count); +// } +// return count; +// } +// finally +// { +// if (results != null) +// { +// results.close(); +// } +// } +// } +// +// /** +// * @return Returns the transaction ID just reindexed, i.e. 
where some work was performed +// */ +// private List reindexNodes() +// { +// // get a list of all transactions still requiring a check +// List txnsToCheck = getNextChangeTxnIds(FullIndexRecoveryComponent.currentTxnId); +// +// // loop over each transaction +// for (String changeTxnId : txnsToCheck) +// { +// reindexNodes(changeTxnId); +// } +// +// // done +// return txnsToCheck; +// } +// +// /** +// * Reindexes changes specific to the change transaction ID. +// *

    +// * All exceptions are absorbed. +// */ +// private void reindexNodes(final String changeTxnId) +// { +// /* +// * This must execute each within its own transaction. +// * The cache size is therefore not an issue. +// */ +// TransactionWork reindexWork = new TransactionWork() +// { +// public Object doWork() throws Exception +// { +// // perform the work in a Hibernate callback +// HibernateCallback callback = new ReindexCallback(changeTxnId); +// getHibernateTemplate().execute(callback); +// // done +// return null; +// } +// }; +// try +// { +// TransactionUtil.executeInNonPropagatingUserTransaction(transactionService, reindexWork); +// } +// catch (Throwable e) +// { +// logger.error("Transaction reindex failed: \n" + +// " txn: " + changeTxnId, +// e); +// } +// finally +// { +// // Up the current transaction now, in case the process fails at this point. +// // This will prevent the transaction from being processed again. +// // This applies to failures as well, which should be dealt with externally +// // and having the entire process start again, e.g. such as a system reboot +// currentTxnId = changeTxnId; +// } +// } +// +// /** +// * Stateful inner class that implements a single reindex call for a given store +// * and transaction. +// *

    +// * It must be called within its own transaction. +// * +// * @author Derek Hulley +// */ +// private class ReindexCallback implements HibernateCallback +// { +// private final String changeTxnId; +// +// public ReindexCallback(String changeTxnId) +// { +// this.changeTxnId = changeTxnId; +// } +// +// /** +// * Changes the L2 cache usage before reindexing for each store +// * +// * @see #reindexNodes(StoreRef, String) +// */ +// public Object doInHibernate(Session session) +// { +// // set the way the L2 cache is used +// getSession().setCacheMode(l2CacheMode); +// +// // reindex each store +// for (StoreRef storeRef : storeRefs) +// { +// if (!nodeService.exists(storeRef)) +// { +// // the store is not present +// continue; +// } +// // reindex for store +// reindexNodes(storeRef, changeTxnId); +// } +// // done +// return null; +// } +// +// private void reindexNodes(StoreRef storeRef, String changeTxnId) +// { +// // check if we need to perform this operation +// SearchParameters sp = new SearchParameters(); +// sp.addStore(storeRef); +// +// // search for it in the index +// String query = "TX:\"" + changeTxnId + "\""; +// sp.setLanguage(SearchService.LANGUAGE_LUCENE); +// sp.setQuery(query); +// ResultSet results = null; +// try +// { +// results = searcher.query(sp); +// // did the index have any of these changes? +// if (results.length() > 0) +// { +// // the transaction has an entry in the index - assume that it was +// // atomically correct +// if (logger.isDebugEnabled()) +// { +// logger.debug("Transaction present in index - no indexing required: \n" + +// " store: " + storeRef + "\n" + +// " txn: " + changeTxnId); +// } +// return; +// } +// } +// finally +// { +// if (results != null) +// { +// results.close(); +// } +// } +// // the index has no record of this +// // were there any changes, or is it all just deletions? +// int changedCount = getChangedNodeStatusesCount(storeRef, changeTxnId); +// if (changedCount == 0) +// { +// // no nodes were changed in the transaction, i.e. 
they are only deletions +// // the index is quite right not to have any entries for the transaction +// if (logger.isDebugEnabled()) +// { +// logger.debug("Transaction only has deletions - no indexing required: \n" + +// " store: " + storeRef + "\n" + +// " txn: " + changeTxnId); +// } +// return; +// } +// +// // process the deletions relevant to the txn and the store +// List deletedNodeStatuses = getDeletedNodeStatuses(storeRef, changeTxnId); +// for (NodeStatus status : deletedNodeStatuses) +// { +// NodeRef nodeRef = new NodeRef(storeRef, status.getKey().getGuid()); +// // only the child node ref is relevant +// ChildAssociationRef assocRef = new ChildAssociationRef( +// ContentModel.ASSOC_CHILDREN, +// null, +// null, +// nodeRef); +// indexer.deleteNode(assocRef); +// } +// +// // process additions +// List changedNodeStatuses = getChangedNodeStatuses(storeRef, changeTxnId); +// for (NodeStatus status : changedNodeStatuses) +// { +// NodeRef nodeRef = new NodeRef(storeRef, status.getKey().getGuid()); +// // get the primary assoc for the node +// ChildAssociationRef primaryAssocRef = nodeService.getPrimaryParent(nodeRef); +// // reindex +// indexer.createNode(primaryAssocRef); +// } +// +// // done +// if (logger.isDebugEnabled()) +// { +// logger.debug("Transaction reindexed: \n" + +// " store: " + storeRef + "\n" + +// " txn: " + changeTxnId + "\n" + +// " deletions: " + deletedNodeStatuses.size() + "\n" + +// " modifications: " + changedNodeStatuses.size()); +// } +// } +// }; +// +// /** +// * Retrieve all transaction IDs that are greater than the given transaction ID. +// * +// * @param currentTxnId the transaction ID that must be less than all returned results +// * @return Returns an ordered list of transaction IDs +// */ +// @SuppressWarnings("unchecked") +// public List getNextChangeTxnIds(final String currentTxnId) +// { +// HibernateCallback callback = new HibernateCallback() +// { +// public Object doInHibernate(Session session) +// { +// Query query = session.getNamedQuery(QUERY_GET_NEXT_CHANGE_TXN_IDS); +// query.setString("currentTxnId", currentTxnId) +// .setReadOnly(true); +// return query.list(); +// } +// }; +// List queryResults = (List) getHibernateTemplate().execute(callback); +// // done +// return queryResults; +// } +// +// @SuppressWarnings("unchecked") +// public int getChangedNodeStatusesCount(final StoreRef storeRef, final String changeTxnId) +// { +// HibernateCallback callback = new HibernateCallback() +// { +// public Object doInHibernate(Session session) +// { +// Query query = session.getNamedQuery(QUERY_GET_CHANGED_NODE_STATUSES_COUNT); +// query.setString("storeProtocol", storeRef.getProtocol()) +// .setString("storeIdentifier", storeRef.getIdentifier()) +// .setString("changeTxnId", changeTxnId) +// .setReadOnly(true); +// return query.uniqueResult(); +// } +// }; +// Integer changeCount = (Integer) getHibernateTemplate().execute(callback); +// // done +// return changeCount.intValue(); +// } +// +// @SuppressWarnings("unchecked") +// public List getChangedNodeStatuses(final StoreRef storeRef, final String changeTxnId) +// { +// HibernateCallback callback = new HibernateCallback() +// { +// public Object doInHibernate(Session session) +// { +// Query query = session.getNamedQuery(QUERY_GET_CHANGED_NODE_STATUSES); +// query.setString("storeProtocol", storeRef.getProtocol()) +// .setString("storeIdentifier", storeRef.getIdentifier()) +// .setString("changeTxnId", changeTxnId) +// .setReadOnly(true); +// return query.list(); +// } +// }; +// List 
queryResults = (List) getHibernateTemplate().execute(callback); +// // done +// return queryResults; +// } +// +// @SuppressWarnings("unchecked") +// public List getDeletedNodeStatuses(final StoreRef storeRef, final String changeTxnId) +// { +// HibernateCallback callback = new HibernateCallback() +// { +// public Object doInHibernate(Session session) +// { +// Query query = session.getNamedQuery(QUERY_GET_DELETED_NODE_STATUSES); +// query.setString("storeProtocol", storeRef.getProtocol()) +// .setString("storeIdentifier", storeRef.getIdentifier()) +// .setString("changeTxnId", changeTxnId) +// .setReadOnly(true); +// return query.list(); +// } +// }; +// List queryResults = (List) getHibernateTemplate().execute(callback); +// // done +// return queryResults; +// } +//} \ No newline at end of file diff --git a/source/java/org/alfresco/util/debug/MethodCallLogAdvice.java b/source/java/org/alfresco/util/debug/MethodCallLogAdvice.java deleted file mode 100644 index a80048bf3c..0000000000 --- a/source/java/org/alfresco/util/debug/MethodCallLogAdvice.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright (C) 2005 Alfresco, Inc. - * - * Licensed under the Mozilla Public License version 1.1 - * with a permitted attribution clause. You may obtain a - * copy of the License at - * - * http://www.alfresco.org/legal/license.txt - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, - * either express or implied. See the License for the specific - * language governing permissions and limitations under the - * License. - */ -package org.alfresco.util.debug; - -import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -/** - * Performs writing to DEBUG of incoming arguments and outgoing results for a method call.
    - * If the method invocation throws an exception, then the incoming arguments are - * logged to DEBUG as well.
    - * The implementation adds very little overhead to a normal method - * call by only building log messages when required. - *

    - * The logging is done against the logger retrieved using the names: - *

    - *

    - *      org.alfresco.util.debug.MethodCallLogAdvice
    - *         AND
    - *      targetClassName
    - *      targetClassName.methodName
    - *      targetClassName.methodName.exception
    - * 
    - *

    - * The following examples show how to control the log levels: - *

    - *

    - *      org.alfresco.util.debug.MethodCallLogAdvice=DEBUG   # activate method logging
    - *          AND
    - *      x.y.MyClass=DEBUG                           # log debug for all method calls on MyClass
    - *      x.y.MyClass.doSomething=DEBUG               # log debug for all doSomething method calls
    - *      x.y.MyClass.doSomething.exception=DEBUG     # only log debug for doSomething() upon exception
    - * 
    - *

    - * - * @author Derek Hulley - */ -public class MethodCallLogAdvice implements MethodInterceptor -{ - private static final Log logger = LogFactory.getLog(MethodCallLogAdvice.class); - - public Object invoke(MethodInvocation invocation) throws Throwable - { - if (logger.isDebugEnabled()) - { - return invokeWithLogging(invocation); - } - else - { - // no logging required - return invocation.proceed(); - } - } - - /** - * Only executes logging code if logging is required - */ - private Object invokeWithLogging(MethodInvocation invocation) throws Throwable - { - String methodName = invocation.getMethod().getName(); - String className = invocation.getMethod().getDeclaringClass().getName(); - - // execute as normal - try - { - Object ret = invocation.proceed(); - // logging - Log methodLogger = LogFactory.getLog(className + "." + methodName); - if (methodLogger.isDebugEnabled()) - { - // log success - StringBuffer sb = getInvocationInfo(className, methodName, invocation.getArguments()); - sb.append(" Result: ").append(ret); - methodLogger.debug(sb); - } - // done - return ret; - } - catch (Throwable e) - { - Log exceptionLogger = LogFactory.getLog(className + "." + methodName + ".exception"); - if (exceptionLogger.isDebugEnabled()) - { - StringBuffer sb = getInvocationInfo(className, methodName, invocation.getArguments()); - sb.append(" Failure: ").append(e.getClass().getName()).append(" - ").append(e.getMessage()); - exceptionLogger.debug(sb); - } - // rethrow - throw e; - } - } - - /** - * Return format: - *

    -     *      Method: className#methodName
    -     *         Argument: arg0
    -     *         Argument: arg1
    -     *         ...
    -     *         Argument: argN {newline}
    -     * 
    - * - * @param className - * @param methodName - * @param args - * @return Returns a StringBuffer containing the details of a method call - */ - private StringBuffer getInvocationInfo(String className, String methodName, Object[] args) - { - StringBuffer sb = new StringBuffer(250); - sb.append("\nMethod: ").append(className).append("#").append(methodName).append("\n"); - sb.append(" Transaction: ").append(AlfrescoTransactionSupport.getTransactionId()).append("\n"); - for (Object arg : args) - { - sb.append(" Argument: ").append(arg).append("\n"); - } - return sb; - } -}
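With MethodCallLogAdvice deleted, projects that still want the same DEBUG tracing can wire up an equivalent AOP Alliance interceptor themselves. A trimmed-down sketch along the lines of the removed class, keeping only the argument/result logging and dropping the per-method logger hierarchy and the transaction ID line (the class name here is hypothetical):

    import org.aopalliance.intercept.MethodInterceptor;
    import org.aopalliance.intercept.MethodInvocation;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    // Sketch only: logs arguments and the result (or failure) of each intercepted call at DEBUG.
    public class SimpleMethodLogAdvice implements MethodInterceptor
    {
        private static final Log logger = LogFactory.getLog(SimpleMethodLogAdvice.class);

        public Object invoke(MethodInvocation invocation) throws Throwable
        {
            if (!logger.isDebugEnabled())
            {
                return invocation.proceed();          // no message building when DEBUG is off
            }
            String call = invocation.getMethod().getDeclaringClass().getName()
                    + "#" + invocation.getMethod().getName();
            String args = java.util.Arrays.toString(invocation.getArguments());
            try
            {
                Object ret = invocation.proceed();
                logger.debug(call + " args=" + args + " result=" + ret);
                return ret;
            }
            catch (Throwable e)
            {
                logger.debug(call + " args=" + args + " failed: " + e);
                throw e;
            }
        }
    }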