diff --git a/config/alfresco/attributes-service-context.xml b/config/alfresco/attributes-service-context.xml index 45eac09fe8..a8b8bb0fe9 100644 --- a/config/alfresco/attributes-service-context.xml +++ b/config/alfresco/attributes-service-context.xml @@ -10,4 +10,30 @@ + + + + + org.alfresco.repo.attributes.PropTablesCleanupJob + + + + + + + + + + + + + + + + + + + ${attributes.propcleaner.cronExpression} + + \ No newline at end of file diff --git a/config/alfresco/dao/dao-context.xml b/config/alfresco/dao/dao-context.xml index ba566a5ddd..7d3a7b50ec 100644 --- a/config/alfresco/dao/dao-context.xml +++ b/config/alfresco/dao/dao-context.xml @@ -175,6 +175,25 @@ + + + + + + + + + + + + + + + + + + + @@ -193,6 +212,8 @@ + + diff --git a/config/alfresco/dbscripts/utility/org.hibernate.dialect.MySQLInnoDBDialect/CleanAlfPropTables.sql b/config/alfresco/dbscripts/utility/org.hibernate.dialect.MySQLInnoDBDialect/CleanAlfPropTables.sql new file mode 100644 index 0000000000..c0b3845656 --- /dev/null +++ b/config/alfresco/dbscripts/utility/org.hibernate.dialect.MySQLInnoDBDialect/CleanAlfPropTables.sql @@ -0,0 +1,64 @@ +--BEGIN TXN + +-- get all active references to alf_prop_root +--FOREACH alf_audit_app.id system.upgrade.clean_alf_prop_tables.batchsize +create table temp_prop_root_ref as select disabled_paths_id as id from alf_audit_app where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}; +create index idx_temp_prop_root_ref_id on temp_prop_root_ref(id); +--FOREACH alf_audit_entry.audit_values_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_root_ref select audit_values_id from alf_audit_entry where audit_values_id >= ${LOWERBOUND} and audit_values_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.prop1_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_root_ref select prop1_id from alf_prop_unique_ctx where prop1_id is not null and prop1_id >= ${LOWERBOUND} and prop1_id <= ${UPPERBOUND}; + +-- determine the obsolete entries from alf_prop_root +--FOREACH alf_prop_root.id system.upgrade.clean_alf_prop_tables.batchsize +create table temp_prop_root_abs as select alf_prop_root.id from alf_prop_root left join temp_prop_root_ref on temp_prop_root_ref.id = alf_prop_root.id where temp_prop_root_ref.id is null and alf_prop_root.id >= ${LOWERBOUND} and alf_prop_root.id <= ${UPPERBOUND}; +create index idx_temp_prop_root_abs_id on temp_prop_root_abs(id); + +-- clear alf_prop_root which cascades DELETE to alf_prop_link +--FOREACH temp_prop_root_abs.id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_root where id in (select id from temp_prop_root_abs where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- get all active references to alf_prop_value + +--FOREACH alf_prop_value.id system.upgrade.clean_alf_prop_tables.batchsize +create table temp_prop_val_ref as select id from alf_prop_value where id in (select app_name_id from alf_audit_app) and id >= ${LOWERBOUND} and id <= ${UPPERBOUND}; +create index idx_temp_prop_val_ref_id on temp_prop_val_ref(id); +--FOREACH alf_audit_entry.audit_user_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select audit_user_id from alf_audit_entry where audit_user_id >= ${LOWERBOUND} and audit_user_id <= ${UPPERBOUND}; +--FOREACH alf_prop_link.key_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select key_prop_id from alf_prop_link where key_prop_id >= ${LOWERBOUND} and key_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_link.value_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value_prop_id from alf_prop_link where value_prop_id >= ${LOWERBOUND} and value_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.value1_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value1_prop_id from alf_prop_unique_ctx where value1_prop_id >= ${LOWERBOUND} and value1_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.value2_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value2_prop_id from alf_prop_unique_ctx where value2_prop_id >= ${LOWERBOUND} and value2_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.value3_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value3_prop_id from alf_prop_unique_ctx where value3_prop_id >= ${LOWERBOUND} and value3_prop_id <= ${UPPERBOUND}; + +-- determine the obsolete entries from alf_prop_value +--FOREACH alf_prop_value.id system.upgrade.clean_alf_prop_tables.batchsize +create table temp_prop_val_abs as select apv.id, apv.persisted_type, apv.long_value from alf_prop_value apv left join temp_prop_val_ref on (apv.id = temp_prop_val_ref.id) where temp_prop_val_ref.id is null and apv.id >= ${LOWERBOUND} and apv.id <= ${UPPERBOUND}; +create index idx_temp_prop_val_abs_id on temp_prop_val_abs(id); +create index idx_temp_prop_val_abs_per on temp_prop_val_abs(persisted_type, id, long_value); + +-- clear the obsolete entries +--FOREACH temp_prop_val_abs.id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_value where id in (select id from temp_prop_val_abs where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- find and clear obsoleted string values +create table temp_del_str as select temp_prop_val_abs.long_value as string_id from temp_prop_val_abs left join alf_prop_value apv on (apv.id = temp_prop_val_abs.id) where temp_prop_val_abs.persisted_type in (3,5,6) and apv.id is null; +--FOREACH temp_del_str.string_id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_string_value where id in (select id from temp_del_str where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- find and clear obsoleted serialized values +create table temp_del_ser as select temp_prop_val_abs.long_value as string_id from temp_prop_val_abs left join alf_prop_value apv on (apv.id = temp_prop_val_abs.id) where temp_prop_val_abs.persisted_type = 4 and apv.id is null; +--FOREACH temp_del_ser.string_id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_serializable_value where id in (select id from temp_del_ser where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- find and clear obsoleted double values +create table temp_del_double as select temp_prop_val_abs.long_value as string_id from temp_prop_val_abs left join alf_prop_value apv on (apv.id = temp_prop_val_abs.id) where temp_prop_val_abs.persisted_type = 2 and apv.id is null; +--FOREACH temp_del_double.string_id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_double_value where id in (select id from temp_del_double where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +--END TXN \ No newline at end of file diff --git a/config/alfresco/dbscripts/utility/org.hibernate.dialect.MySQLInnoDBDialect/CleanAlfPropTablesPostExec.sql b/config/alfresco/dbscripts/utility/org.hibernate.dialect.MySQLInnoDBDialect/CleanAlfPropTablesPostExec.sql new file mode 100644 index 0000000000..b8bf57f498 --- /dev/null +++ b/config/alfresco/dbscripts/utility/org.hibernate.dialect.MySQLInnoDBDialect/CleanAlfPropTablesPostExec.sql @@ -0,0 +1,12 @@ +--BEGIN TXN + +-- cleanup temporary structures +drop table temp_prop_root_ref; --(optional) +drop table temp_prop_root_abs; --(optional) +drop table temp_prop_val_ref; --(optional) +drop table temp_prop_val_abs; --(optional) +drop table temp_del_str; --(optional) +drop table temp_del_ser; --(optional) +drop table temp_del_double; --(optional) + +--END TXN \ No newline at end of file diff --git a/config/alfresco/dbscripts/utility/org.hibernate.dialect.PostgreSQLDialect/CleanAlfPropTables.sql b/config/alfresco/dbscripts/utility/org.hibernate.dialect.PostgreSQLDialect/CleanAlfPropTables.sql new file mode 100644 index 0000000000..08eaab2f7b --- /dev/null +++ b/config/alfresco/dbscripts/utility/org.hibernate.dialect.PostgreSQLDialect/CleanAlfPropTables.sql @@ -0,0 +1,63 @@ +--BEGIN TXN + +-- get all active references to alf_prop_root +--FOREACH alf_audit_app.id system.upgrade.clean_alf_prop_tables.batchsize +create temp table temp_prop_root_ref as select disabled_paths_id as id from alf_audit_app where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}; +create index idx_temp_prop_root_ref_id on temp_prop_root_ref(id); +--FOREACH alf_audit_entry.audit_values_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_root_ref select audit_values_id from alf_audit_entry where audit_values_id >= ${LOWERBOUND} and audit_values_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.prop1_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_root_ref select prop1_id from alf_prop_unique_ctx where prop1_id is not null and prop1_id >= ${LOWERBOUND} and prop1_id <= ${UPPERBOUND}; + +-- determine the obsolete entries from alf_prop_root +--FOREACH alf_prop_root.id system.upgrade.clean_alf_prop_tables.batchsize +create temp table temp_prop_root_abs as select alf_prop_root.id from alf_prop_root left join temp_prop_root_ref on temp_prop_root_ref.id = alf_prop_root.id where temp_prop_root_ref.id is null and alf_prop_root.id >= ${LOWERBOUND} and alf_prop_root.id <= ${UPPERBOUND}; +create index idx_temp_prop_root_abs_id on temp_prop_root_abs(id); + +-- clear alf_prop_root which cascades DELETE to alf_prop_link +--FOREACH temp_prop_root_abs.id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_root where id in (select id from temp_prop_root_abs where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- get all active references to alf_prop_value +--FOREACH alf_prop_value.id system.upgrade.clean_alf_prop_tables.batchsize +create temp table temp_prop_val_ref as select id from alf_prop_value where id in (select app_name_id from alf_audit_app) and id >= ${LOWERBOUND} and id <= ${UPPERBOUND}; +create index idx_temp_prop_val_ref_id on temp_prop_val_ref(id); +--FOREACH alf_audit_entry.audit_user_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select audit_user_id from alf_audit_entry where audit_user_id >= ${LOWERBOUND} and audit_user_id <= ${UPPERBOUND}; +--FOREACH alf_prop_link.key_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select key_prop_id from alf_prop_link where key_prop_id >= ${LOWERBOUND} and key_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_link.value_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value_prop_id from alf_prop_link where value_prop_id >= ${LOWERBOUND} and value_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.value1_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value1_prop_id from alf_prop_unique_ctx where value1_prop_id >= ${LOWERBOUND} and value1_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.value2_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value2_prop_id from alf_prop_unique_ctx where value2_prop_id >= ${LOWERBOUND} and value2_prop_id <= ${UPPERBOUND}; +--FOREACH alf_prop_unique_ctx.value3_prop_id system.upgrade.clean_alf_prop_tables.batchsize +insert into temp_prop_val_ref select value3_prop_id from alf_prop_unique_ctx where value3_prop_id >= ${LOWERBOUND} and value3_prop_id <= ${UPPERBOUND}; + +-- determine the obsolete entries from alf_prop_value +--FOREACH alf_prop_value.id system.upgrade.clean_alf_prop_tables.batchsize +create temp table temp_prop_val_abs as select apv.id, apv.persisted_type, apv.long_value from alf_prop_value apv left join temp_prop_val_ref on (apv.id = temp_prop_val_ref.id) where temp_prop_val_ref.id is null and apv.id >= ${LOWERBOUND} and apv.id <= ${UPPERBOUND}; +create index idx_temp_prop_val_abs_id on temp_prop_val_abs(id); +create index idx_temp_prop_val_abs_per on temp_prop_val_abs(persisted_type, id, long_value); + +-- clear the obsolete entries +--FOREACH temp_prop_val_abs.id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_value where id in (select id from temp_prop_val_abs where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- find and clear obsoleted string values +create table temp_del_str as select temp_prop_val_abs.long_value as string_id from temp_prop_val_abs left join alf_prop_value apv on (apv.id = temp_prop_val_abs.id) where temp_prop_val_abs.persisted_type in (3,5,6) and apv.id is null; +--FOREACH temp_del_str.string_id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_string_value where id in (select id from temp_del_str where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- find and clear obsoleted serialized values +create table temp_del_ser as select temp_prop_val_abs.long_value as string_id from temp_prop_val_abs left join alf_prop_value apv on (apv.id = temp_prop_val_abs.id) where temp_prop_val_abs.persisted_type = 4 and apv.id is null; +--FOREACH temp_del_ser.string_id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_serializable_value where id in (select id from temp_del_ser where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +-- find and clear obsoleted double values +create table temp_del_double as select temp_prop_val_abs.long_value as string_id from temp_prop_val_abs left join alf_prop_value apv on (apv.id = temp_prop_val_abs.id) where temp_prop_val_abs.persisted_type = 2 and apv.id is null; +--FOREACH temp_del_double.string_id system.upgrade.clean_alf_prop_tables.batchsize +delete from alf_prop_double_value where id in (select id from temp_del_double where id >= ${LOWERBOUND} and id <= ${UPPERBOUND}); + +--END TXN diff --git a/config/alfresco/dbscripts/utility/org.hibernate.dialect.PostgreSQLDialect/CleanAlfPropTablesPostExec.sql b/config/alfresco/dbscripts/utility/org.hibernate.dialect.PostgreSQLDialect/CleanAlfPropTablesPostExec.sql new file mode 100644 index 0000000000..b8bf57f498 --- /dev/null +++ b/config/alfresco/dbscripts/utility/org.hibernate.dialect.PostgreSQLDialect/CleanAlfPropTablesPostExec.sql @@ -0,0 +1,12 @@ +--BEGIN TXN + +-- cleanup temporary structures +drop table temp_prop_root_ref; --(optional) +drop table temp_prop_root_abs; --(optional) +drop table temp_prop_val_ref; --(optional) +drop table temp_prop_val_abs; --(optional) +drop table temp_del_str; --(optional) +drop table temp_del_ser; --(optional) +drop table temp_del_double; --(optional) + +--END TXN \ No newline at end of file diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties index 26bcdb6e52..86aa36d2dd 100644 --- a/config/alfresco/repository.properties +++ b/config/alfresco/repository.properties @@ -1108,4 +1108,8 @@ system.lockTryTimeout.PolicyComponentImpl=${system.lockTryTimeout} # system.patch.surfConfigFolder.deferred=false # Default value. i.e. never run. It can be triggered using JMX -system.patch.surfConfigFolder.cronExpression=* * * * * ? 2099 \ No newline at end of file +system.patch.surfConfigFolder.cronExpression=* * * * * ? 2099 + +# Scheduled job to clean up unused properties from the alf_prop_xxx tables. +# Default setting is for it never to run. +attributes.propcleaner.cronExpression=* * * * * ? 2099 diff --git a/source/java/org/alfresco/repo/attributes/PropTablesCleanupJob.java b/source/java/org/alfresco/repo/attributes/PropTablesCleanupJob.java new file mode 100644 index 0000000000..ebe3323fb7 --- /dev/null +++ b/source/java/org/alfresco/repo/attributes/PropTablesCleanupJob.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.attributes; + +import org.alfresco.repo.domain.propval.PropertyValueDAO; +import org.quartz.Job; +import org.quartz.JobDataMap; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; + +/** + * Cleanup job to initiate cleaning of unused values from the alf_prop_xxx tables. + * + * @author Matt Ward + */ +public class PropTablesCleanupJob implements Job +{ + protected static final Object PROPERTY_VALUE_DAO_KEY = "propertyValueDAO"; + + @Override + public void execute(JobExecutionContext jobCtx) throws JobExecutionException + { + JobDataMap jobData = jobCtx.getJobDetail().getJobDataMap(); + + PropertyValueDAO propertyValueDAO = (PropertyValueDAO) jobData.get(PROPERTY_VALUE_DAO_KEY); + if (propertyValueDAO == null) + { + throw new IllegalArgumentException(PROPERTY_VALUE_DAO_KEY + " in job data map was null"); + } + + propertyValueDAO.cleanupUnusedValues(); + } +} diff --git a/source/java/org/alfresco/repo/domain/propval/AbstractPropertyValueDAOImpl.java b/source/java/org/alfresco/repo/domain/propval/AbstractPropertyValueDAOImpl.java index 0248fe97e5..13ba78d3f6 100644 --- a/source/java/org/alfresco/repo/domain/propval/AbstractPropertyValueDAOImpl.java +++ b/source/java/org/alfresco/repo/domain/propval/AbstractPropertyValueDAOImpl.java @@ -1588,4 +1588,15 @@ public abstract class AbstractPropertyValueDAOImpl implements PropertyValueDAO // This will have put the values into the correct containers return result; } + + protected void clearCaches() + { + propertyClassCache.clear(); + propertyDateValueCache.clear(); + propertyStringValueCache.clear(); + propertyDoubleValueCache.clear(); + propertySerializableValueCache.clear(); + propertyCache.clear(); + propertyValueCache.clear(); + } } diff --git a/source/java/org/alfresco/repo/domain/propval/PropertyValueDAO.java b/source/java/org/alfresco/repo/domain/propval/PropertyValueDAO.java index be3b223aad..6c5df7d40e 100644 --- a/source/java/org/alfresco/repo/domain/propval/PropertyValueDAO.java +++ b/source/java/org/alfresco/repo/domain/propval/PropertyValueDAO.java @@ -362,4 +362,9 @@ public interface PropertyValueDAO * @throws IllegalArgumentException if rows don't all share the same root property ID */ Serializable convertPropertyIdSearchRows(List rows); + + /** + * Remove orphaned properties. + */ + void cleanupUnusedValues(); } diff --git a/source/java/org/alfresco/repo/domain/propval/ibatis/PropertyValueDAOImpl.java b/source/java/org/alfresco/repo/domain/propval/ibatis/PropertyValueDAOImpl.java index d37ab556f9..d6b2b36344 100644 --- a/source/java/org/alfresco/repo/domain/propval/ibatis/PropertyValueDAOImpl.java +++ b/source/java/org/alfresco/repo/domain/propval/ibatis/PropertyValueDAOImpl.java @@ -38,6 +38,7 @@ import org.alfresco.repo.domain.propval.PropertyStringValueEntity; import org.alfresco.repo.domain.propval.PropertyUniqueContextEntity; import org.alfresco.repo.domain.propval.PropertyValueEntity; import org.alfresco.repo.domain.propval.PropertyValueEntity.PersistedType; +import org.alfresco.repo.domain.schema.script.ScriptBundleExecutor; import org.alfresco.util.Pair; import org.apache.ibatis.session.ResultContext; import org.apache.ibatis.session.ResultHandler; @@ -98,11 +99,18 @@ public class PropertyValueDAOImpl extends AbstractPropertyValueDAOImpl private SqlSessionTemplate template; + private ScriptBundleExecutor scriptExecutor; + public final void setSqlSessionTemplate(SqlSessionTemplate sqlSessionTemplate) { this.template = sqlSessionTemplate; } + public void setScriptExecutor(ScriptBundleExecutor scriptExecutor) + { + this.scriptExecutor = scriptExecutor; + } + //================================ // 'alf_prop_class' accessors @@ -672,4 +680,31 @@ public class PropertyValueDAOImpl extends AbstractPropertyValueDAOImpl entity.setId(rootPropId); return template.delete(DELETE_PROPERTY_LINKS_BY_ROOT_ID, entity); } + + @Override + public void cleanupUnusedValues() + { + // execute clean up in case of previous failures + scriptExecutor.exec("alfresco/dbscripts/utility/${db.script.dialect}", "CleanAlfPropTablesPostExec.sql"); + try + { + scriptExecutor.exec("alfresco/dbscripts/utility/${db.script.dialect}", "CleanAlfPropTables.sql"); + } + finally + { + try + { + // execute clean up + scriptExecutor.exec("alfresco/dbscripts/utility/${db.script.dialect}", "CleanAlfPropTablesPostExec.sql"); + } + catch (Exception e) + { + if (logger.isErrorEnabled()) + { + logger.error("The cleanup failed with an error: ", e); + } + } + clearCaches(); + } + } } diff --git a/source/java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutor.java b/source/java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutor.java new file mode 100644 index 0000000000..0e4bc1e764 --- /dev/null +++ b/source/java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutor.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.schema.script; + +/** + * Executes a set of zero or more SQL scripts. + * + * @author Matt Ward + */ +public interface ScriptBundleExecutor +{ + /** + * Runs a bundle of scripts. If any script within the bundle fails, then the rest of the files are not run. + * + * @param dir Directory where the script bundle may be found. + * @param scripts Names of the SQL scripts to run, relative to the specified directory. + */ + void exec(String dir, String... scripts); + + /** + * Runs a bundle of scripts. If any script within the bundle fails, then the rest of the files + * are not run, with the exception of postScript - which is always run (a clean-up script for example). + * + * @param dir Directory where the script bundle may be found. + * @param postScript A script that is always run after the other scripts. + * @param scripts Names of the SQL scripts to run, relative to the specified directory. + */ + void execWithPostScript(String dir, String postScript, String... scripts); +} diff --git a/source/java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImpl.java b/source/java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImpl.java new file mode 100644 index 0000000000..e58a246568 --- /dev/null +++ b/source/java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImpl.java @@ -0,0 +1,74 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.schema.script; + +import java.io.File; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * {@link ScriptBundleExecutor} implementation. Uses the supplied {@link ScriptExecutor} + * to invoke multiple SQL scripts in a particular directory. + * + * @author Matt Ward + */ +public class ScriptBundleExecutorImpl implements ScriptBundleExecutor +{ + private ScriptExecutor scriptExecutor; + protected Log log = LogFactory.getLog(ScriptBundleExecutorImpl.class); + + public ScriptBundleExecutorImpl(ScriptExecutor scriptExecutor) + { + this.scriptExecutor = scriptExecutor; + } + + @Override + public void exec(String dir, String... scripts) + { + for (String name : scripts) + { + File file = new File(dir, name); + try + { + scriptExecutor.executeScriptUrl(file.getPath()); + } + catch (Throwable e) + { + log.error("Unable to run SQL script: dir=" + dir + ", name="+name, e); + // Do not run any more scripts. + break; + } + } + } + + @Override + public void execWithPostScript(String dir, String postScript, String... scripts) + { + try + { + exec(dir, scripts); + } + finally + { + // Always run the post-script. + exec(dir, postScript); + } + } +} diff --git a/source/java/org/alfresco/repo/domain/schema/script/ScriptExecutor.java b/source/java/org/alfresco/repo/domain/schema/script/ScriptExecutor.java new file mode 100644 index 0000000000..7bdcbc9742 --- /dev/null +++ b/source/java/org/alfresco/repo/domain/schema/script/ScriptExecutor.java @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.schema.script; + +/** + * Defines a SQL script executor that executes a single SQL script. + * + * @author Matt Ward + */ +public interface ScriptExecutor +{ + void executeScriptUrl(String scriptUrl) throws Exception; +} diff --git a/source/java/org/alfresco/repo/domain/schema/script/ScriptExecutorImpl.java b/source/java/org/alfresco/repo/domain/schema/script/ScriptExecutorImpl.java new file mode 100644 index 0000000000..51fdc49b62 --- /dev/null +++ b/source/java/org/alfresco/repo/domain/schema/script/ScriptExecutorImpl.java @@ -0,0 +1,597 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.schema.script; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import javax.sql.DataSource; + +import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.repo.content.filestore.FileContentWriter; +import org.alfresco.service.cmr.repository.ContentWriter; +import org.alfresco.util.LogUtil; +import org.alfresco.util.TempFileProvider; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.cfg.Configuration; +import org.hibernate.dialect.Dialect; +import org.hibernate.dialect.MySQLInnoDBDialect; +import org.hibernate.dialect.PostgreSQLDialect; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.core.io.support.ResourcePatternResolver; +import org.springframework.orm.hibernate3.LocalSessionFactoryBean; + + +public class ScriptExecutorImpl implements ScriptExecutor +{ + /** The placeholder for the configured Dialect class name: ${db.script.dialect} */ + private static final String PLACEHOLDER_DIALECT = "\\$\\{db\\.script\\.dialect\\}"; + /** The global property containing the default batch size used by --FOREACH */ + private static final String PROPERTY_DEFAULT_BATCH_SIZE = "system.upgrade.default.batchsize"; + private static final String MSG_EXECUTING_GENERATED_SCRIPT = "schema.update.msg.executing_generated_script"; + private static final String MSG_EXECUTING_COPIED_SCRIPT = "schema.update.msg.executing_copied_script"; + private static final String MSG_EXECUTING_STATEMENT = "schema.update.msg.executing_statement"; + private static final String MSG_OPTIONAL_STATEMENT_FAILED = "schema.update.msg.optional_statement_failed"; + private static final String ERR_STATEMENT_FAILED = "schema.update.err.statement_failed"; + private static final String ERR_SCRIPT_NOT_FOUND = "schema.update.err.script_not_found"; + private static final String ERR_STATEMENT_INCLUDE_BEFORE_SQL = "schema.update.err.statement_include_before_sql"; + private static final String ERR_STATEMENT_VAR_ASSIGNMENT_BEFORE_SQL = "schema.update.err.statement_var_assignment_before_sql"; + private static final String ERR_STATEMENT_VAR_ASSIGNMENT_FORMAT = "schema.update.err.statement_var_assignment_format"; + private static final String ERR_STATEMENT_TERMINATOR = "schema.update.err.statement_terminator"; + private static final int DEFAULT_MAX_STRING_LENGTH = 1024; + private static volatile int maxStringLength = DEFAULT_MAX_STRING_LENGTH; + private Dialect dialect; + private ResourcePatternResolver rpr = new PathMatchingResourcePatternResolver(this.getClass().getClassLoader()); + private static Log logger = LogFactory.getLog(ScriptExecutorImpl.class); + private LocalSessionFactoryBean localSessionFactory; + private Properties globalProperties; + private ThreadLocal executedStatementsThreadLocal = new ThreadLocal(); + private DataSource dataSource; + + + /** + * @return Returns the maximum number of characters that a string field can be + */ + public static final int getMaxStringLength() + { + return ScriptExecutorImpl.maxStringLength; + } + + /** + * Truncates or returns a string that will fit into the string columns in the schema. Text fields can + * either cope with arbitrarily long text fields or have the default limit, {@link #DEFAULT_MAX_STRING_LENGTH}. + * + * @param value the string to check + * @return Returns a string that is short enough for {@link ScriptExecutorImpl#getMaxStringLength()} + * + * @since 3.2 + */ + public static final String trimStringForTextFields(String value) + { + if (value != null && value.length() > maxStringLength) + { + return value.substring(0, maxStringLength); + } + else + { + return value; + } + } + + /** + * Sets the previously auto-detected Hibernate dialect. + * + * @param dialect + * the dialect + */ + public void setDialect(Dialect dialect) + { + this.dialect = dialect; + } + + public ScriptExecutorImpl() + { + globalProperties = new Properties(); + } + + + public void setLocalSessionFactory(LocalSessionFactoryBean localSessionFactory) + { + this.localSessionFactory = localSessionFactory; + } + + public LocalSessionFactoryBean getLocalSessionFactory() + { + return localSessionFactory; + } + + public void setDataSource(DataSource dataSource) + { + this.dataSource = dataSource; + } + + /** + * Sets the properties map from which we look up some configuration settings. + * + * @param globalProperties + * the global properties + */ + public void setGlobalProperties(Properties globalProperties) + { + this.globalProperties = globalProperties; + } + + + @Override + public void executeScriptUrl(String scriptUrl) throws Exception + { + Configuration cfg = localSessionFactory.getConfiguration(); + Connection connection = dataSource.getConnection(); + connection.setAutoCommit(true); + try + { + executeScriptUrl(cfg, connection, scriptUrl); + } + finally + { + connection.close(); + } + } + + private void executeScriptUrl(Configuration cfg, Connection connection, String scriptUrl) throws Exception + { + Dialect dialect = Dialect.getDialect(cfg.getProperties()); + String dialectStr = dialect.getClass().getSimpleName(); + InputStream scriptInputStream = getScriptInputStream(dialect.getClass(), scriptUrl); + // check that it exists + if (scriptInputStream == null) + { + throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_FOUND, scriptUrl); + } + // write the script to a temp location for future and failure reference + File tempFile = null; + try + { + tempFile = TempFileProvider.createTempFile("AlfrescoSchema-" + dialectStr + "-Update-", ".sql"); + ContentWriter writer = new FileContentWriter(tempFile); + writer.putContent(scriptInputStream); + } + finally + { + try { scriptInputStream.close(); } catch (Throwable e) {} // usually a duplicate close + } + // now execute it + String dialectScriptUrl = scriptUrl.replaceAll(PLACEHOLDER_DIALECT, dialect.getClass().getName()); + // Replace the script placeholders + executeScriptFile(cfg, connection, tempFile, dialectScriptUrl); + } + + /** + * Replaces the dialect placeholder in the resource URL and attempts to find a file for + * it. If not found, the dialect hierarchy will be walked until a compatible resource is + * found. This makes it possible to have resources that are generic to all dialects. + * + * @return The Resource, otherwise null + */ + private Resource getDialectResource(Class dialectClass, String resourceUrl) + { + // replace the dialect placeholder + String dialectResourceUrl = resolveDialectUrl(dialectClass, resourceUrl); + // get a handle on the resource + Resource resource = rpr.getResource(dialectResourceUrl); + if (!resource.exists()) + { + // it wasn't found. Get the superclass of the dialect and try again + Class superClass = dialectClass.getSuperclass(); + if (Dialect.class.isAssignableFrom(superClass)) + { + // we still have a Dialect - try again + return getDialectResource(superClass, resourceUrl); + } + else + { + // we have exhausted all options + return null; + } + } + else + { + // we have a handle to it + return resource; + } + } + + /** + * Takes resource URL containing the {@link ScriptExecutorImpl#PLACEHOLDER_DIALECT dialect placeholder text} + * and substitutes the placeholder with the name of the given dialect's class. + *

+ * For example: + *

+     *   resolveDialectUrl(MySQLInnoDBDialect.class, "classpath:alfresco/db/${db.script.dialect}/myfile.xml")
+     * 
+ * would give the following String: + *
+     *   classpath:alfresco/db/org.hibernate.dialect.MySQLInnoDBDialect/myfile.xml
+     * 
+ * + * @param dialectClass + * @param resourceUrl + * @return + */ + private String resolveDialectUrl(Class dialectClass, String resourceUrl) + { + return resourceUrl.replaceAll(PLACEHOLDER_DIALECT, dialectClass.getName()); + } + + /** + * Replaces the dialect placeholder in the script URL and attempts to find a file for + * it. If not found, the dialect hierarchy will be walked until a compatible script is + * found. This makes it possible to have scripts that are generic to all dialects. + * + * @return Returns an input stream onto the script, otherwise null + */ + private InputStream getScriptInputStream(Class dialectClazz, String scriptUrl) throws Exception + { + Resource resource = getDialectResource(dialectClazz, scriptUrl); + if (resource == null) + { + return null; + } + return resource.getInputStream(); + } + + /** + * @param cfg the Hibernate configuration + * @param connection the DB connection to use + * @param scriptFile the file containing the statements + * @param scriptUrl the URL of the script to report. If this is null, the script + * is assumed to have been auto-generated. + */ + private void executeScriptFile( + Configuration cfg, + Connection connection, + File scriptFile, + String scriptUrl) throws Exception + { + final Dialect dialect = Dialect.getDialect(cfg.getProperties()); + + StringBuilder executedStatements = executedStatementsThreadLocal.get(); + if (executedStatements == null) + { + executedStatements = new StringBuilder(8094); + executedStatementsThreadLocal.set(executedStatements); + } + + if (scriptUrl == null) + { + LogUtil.info(logger, MSG_EXECUTING_GENERATED_SCRIPT, scriptFile); + } + else + { + LogUtil.info(logger, MSG_EXECUTING_COPIED_SCRIPT, scriptFile, scriptUrl); + } + + InputStream scriptInputStream = new FileInputStream(scriptFile); + BufferedReader reader = new BufferedReader(new InputStreamReader(scriptInputStream, "UTF-8")); + try + { + int line = 0; + // loop through all statements + StringBuilder sb = new StringBuilder(1024); + String fetchVarName = null; + String fetchColumnName = null; + String batchTableName = null; + boolean doBatch = false; + int batchUpperLimit = 0; + int batchSize = 1; + Map varAssignments = new HashMap(13); + // Special variable assignments: + if (dialect instanceof PostgreSQLDialect) + { + // Needs 1/0 for true/false + varAssignments.put("true", "true"); + varAssignments.put("false", "false"); + varAssignments.put("TRUE", "TRUE"); + varAssignments.put("FALSE", "FALSE"); + } + else + { + // Needs true/false as strings + varAssignments.put("true", "1"); + varAssignments.put("false", "0"); + varAssignments.put("TRUE", "1"); + varAssignments.put("FALSE", "0"); + } + long now = System.currentTimeMillis(); + varAssignments.put("now", new Long(now).toString()); + varAssignments.put("NOW", new Long(now).toString()); + + while(true) + { + String sqlOriginal = reader.readLine(); + line++; + + if (sqlOriginal == null) + { + // nothing left in the file + break; + } + + // trim it + String sql = sqlOriginal.trim(); + // Check of includes + if (sql.startsWith("--INCLUDE:")) + { + if (sb.length() > 0) + { + // This can only be set before a new SQL statement + throw AlfrescoRuntimeException.create(ERR_STATEMENT_INCLUDE_BEFORE_SQL, (line - 1), scriptUrl); + } + String includedScriptUrl = sql.substring(10, sql.length()); + // Execute the script in line + executeScriptUrl(cfg, connection, includedScriptUrl); + } + // Check for variable assignment + else if (sql.startsWith("--ASSIGN:")) + { + if (sb.length() > 0) + { + // This can only be set before a new SQL statement + throw AlfrescoRuntimeException.create(ERR_STATEMENT_VAR_ASSIGNMENT_BEFORE_SQL, (line - 1), scriptUrl); + } + String assignStr = sql.substring(9, sql.length()); + String[] assigns = assignStr.split("="); + if (assigns.length != 2 || assigns[0].length() == 0 || assigns[1].length() == 0) + { + throw AlfrescoRuntimeException.create(ERR_STATEMENT_VAR_ASSIGNMENT_FORMAT, (line - 1), scriptUrl); + } + fetchVarName = assigns[0]; + fetchColumnName = assigns[1]; + continue; + } + // Handle looping control + else if (sql.startsWith("--FOREACH")) + { + // --FOREACH table.column batch.size.property + String[] args = sql.split("[ \\t]+"); + int sepIndex; + if (args.length == 3 && (sepIndex = args[1].indexOf('.')) != -1) + { + doBatch = true; + // Select the upper bound of the table column + batchTableName = args[1].substring(0, sepIndex); + String stmt = "SELECT MAX(" + args[1].substring(sepIndex+1) + ") AS upper_limit FROM " + batchTableName; + Object fetchedVal = executeStatement(connection, stmt, "upper_limit", false, line, scriptFile); + if (fetchedVal instanceof Number) + { + batchUpperLimit = ((Number)fetchedVal).intValue(); + // Read the batch size from the named property + String batchSizeString = globalProperties.getProperty(args[2]); + // Fall back to the default property + if (batchSizeString == null) + { + batchSizeString = globalProperties.getProperty(PROPERTY_DEFAULT_BATCH_SIZE); + } + batchSize = batchSizeString == null ? 10000 : Integer.parseInt(batchSizeString); + } + } + continue; + } + // Allow transaction delineation + else if (sql.startsWith("--BEGIN TXN")) + { + connection.setAutoCommit(false); + continue; + } + else if (sql.startsWith("--END TXN")) + { + connection.commit(); + connection.setAutoCommit(true); + continue; + } + + // Check for comments + if (sql.length() == 0 || + sql.startsWith( "--" ) || + sql.startsWith( "//" ) || + sql.startsWith( "/*" ) ) + { + if (sb.length() > 0) + { + // we have an unterminated statement + throw AlfrescoRuntimeException.create(ERR_STATEMENT_TERMINATOR, (line - 1), scriptUrl); + } + // there has not been anything to execute - it's just a comment line + continue; + } + // have we reached the end of a statement? + boolean execute = false; + boolean optional = false; + if (sql.endsWith(";")) + { + sql = sql.substring(0, sql.length() - 1); + execute = true; + optional = false; + } + else if (sql.endsWith("(optional)") || sql.endsWith("(OPTIONAL)")) + { + // Get the end of statement + int endIndex = sql.lastIndexOf(';'); + if (endIndex > -1) + { + sql = sql.substring(0, endIndex); + execute = true; + optional = true; + } + else + { + // Ends with "(optional)" but there is no semi-colon. + // Just take it at face value and probably fail. + } + } + // Add newline + if (sb.length() > 0) + { + sb.append("\n"); + } + // Add leading whitespace for formatting + int whitespaceCount = sqlOriginal.indexOf(sql); + for (int i = 0; i < whitespaceCount; i++) + { + sb.append(" "); + } + // append to the statement being built up + sb.append(sql); + // execute, if required + if (execute) + { + // Now substitute and execute the statement the appropriate number of times + String unsubstituted = sb.toString(); + for(int lowerBound = 0; lowerBound <= batchUpperLimit; lowerBound += batchSize) + { + sql = unsubstituted; + + // Substitute in the next pair of range parameters + if (doBatch) + { + logger.info("Processing from " + lowerBound + " to " + (lowerBound + batchSize) + " rows of " + batchUpperLimit + " rows from table " + batchTableName + "."); + varAssignments.put("LOWERBOUND", String.valueOf(lowerBound)); + varAssignments.put("UPPERBOUND", String.valueOf(lowerBound + batchSize - 1)); + } + + // Perform variable replacement using the ${var} format + for (Map.Entry entry : varAssignments.entrySet()) + { + String var = entry.getKey(); + Object val = entry.getValue(); + sql = sql.replaceAll("\\$\\{" + var + "\\}", val.toString()); + } + + // Handle the 0/1 values that PostgreSQL doesn't translate to TRUE + if (this.dialect != null && this.dialect instanceof PostgreSQLDialect) + { + sql = sql.replaceAll("\\$\\{TRUE\\}", "TRUE"); + } + else + { + sql = sql.replaceAll("\\$\\{TRUE\\}", "1"); + } + + if (this.dialect != null && this.dialect instanceof MySQLInnoDBDialect) + { + // note: enable bootstrap on MySQL 5.5 (eg. for auto-generated SQL, such as JBPM) + sql = sql.replaceAll("(?i)TYPE=InnoDB", "ENGINE=InnoDB"); + } + + Object fetchedVal = executeStatement(connection, sql, fetchColumnName, optional, line, scriptFile); + if (fetchVarName != null && fetchColumnName != null) + { + varAssignments.put(fetchVarName, fetchedVal); + } + } + sb.setLength(0); + fetchVarName = null; + fetchColumnName = null; + batchTableName = null; + doBatch = false; + batchUpperLimit = 0; + batchSize = 1; + } + } + } + finally + { + try { reader.close(); } catch (Throwable e) {} + try { scriptInputStream.close(); } catch (Throwable e) {} + } + } + + /** + * Execute the given SQL statement, absorbing exceptions that we expect during + * schema creation or upgrade. + * + * @param fetchColumnName the name of the column value to return + */ + private Object executeStatement( + Connection connection, + String sql, + String fetchColumnName, + boolean optional, + int line, + File file) throws Exception + { + StringBuilder executedStatements = executedStatementsThreadLocal.get(); + if (executedStatements == null) + { + throw new IllegalArgumentException("The executedStatementsThreadLocal must be populated"); + } + + Statement stmt = connection.createStatement(); + Object ret = null; + try + { + if (logger.isDebugEnabled()) + { + LogUtil.debug(logger, MSG_EXECUTING_STATEMENT, sql); + } + boolean haveResults = stmt.execute(sql); + // Record the statement + executedStatements.append(sql).append(";\n\n"); + if (haveResults && fetchColumnName != null) + { + ResultSet rs = stmt.getResultSet(); + if (rs.next()) + { + // Get the result value + ret = rs.getObject(fetchColumnName); + } + } + } + catch (SQLException e) + { + if (optional) + { + // it was marked as optional, so we just ignore it + LogUtil.debug(logger, MSG_OPTIONAL_STATEMENT_FAILED, sql, e.getMessage(), file.getAbsolutePath(), line); + } + else + { + LogUtil.error(logger, ERR_STATEMENT_FAILED, sql, e.getMessage(), file.getAbsolutePath(), line); + throw e; + } + } + finally + { + try { stmt.close(); } catch (Throwable e) {} + } + return ret; + } +} diff --git a/source/test-java/org/alfresco/Repository01TestSuite.java b/source/test-java/org/alfresco/Repository01TestSuite.java index e2d40cccc9..7a46863505 100644 --- a/source/test-java/org/alfresco/Repository01TestSuite.java +++ b/source/test-java/org/alfresco/Repository01TestSuite.java @@ -419,6 +419,8 @@ public class Repository01TestSuite extends TestSuite suite.addTest(new JUnit4TestAdapter(org.alfresco.util.CronTriggerBeanSystemTest.class)); suite.addTest(new JUnit4TestAdapter(org.alfresco.filesys.auth.cifs.CifsAuthenticatorKerberosTest.class)); suite.addTest(new JUnit4TestAdapter(org.alfresco.filesys.auth.cifs.CifsAuthenticatorPassthruTest.class)); + suite.addTest(new JUnit4TestAdapter(org.alfresco.repo.domain.schema.script.ScriptExecutorImplIntegrationTest.class)); + suite.addTest(new JUnit4TestAdapter(org.alfresco.repo.domain.schema.script.ScriptBundleExecutorImplIntegrationTest.class)); } static void tests65(TestSuite suite) diff --git a/source/test-java/org/alfresco/repo/attributes/AttributeServiceTest.java b/source/test-java/org/alfresco/repo/attributes/AttributeServiceTest.java index 08f25aed93..a44005e88b 100644 --- a/source/test-java/org/alfresco/repo/attributes/AttributeServiceTest.java +++ b/source/test-java/org/alfresco/repo/attributes/AttributeServiceTest.java @@ -20,13 +20,18 @@ package org.alfresco.repo.attributes; import java.io.Serializable; import java.util.ArrayList; +import java.util.Date; import java.util.List; import junit.framework.TestCase; +import org.alfresco.repo.domain.propval.PropValGenerator; +import org.alfresco.repo.domain.propval.PropertyValueDAO; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.service.cmr.attributes.AttributeService; import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback; import org.alfresco.util.ApplicationContextHelper; +import org.alfresco.util.Pair; import org.apache.commons.lang.mutable.MutableInt; import org.springframework.context.ApplicationContext; @@ -47,12 +52,14 @@ public class AttributeServiceTest extends TestCase private ApplicationContext ctx; private AttributeService attributeService; + private PropertyValueDAO propertyValueDAO; @Override protected void setUp() throws Exception { ctx = ApplicationContextHelper.getApplicationContext(); attributeService = (AttributeService) ctx.getBean("AttributeService"); + propertyValueDAO = (PropertyValueDAO) ctx.getBean("propertyValueDAO"); } @Override @@ -132,4 +139,57 @@ public class AttributeServiceTest extends TestCase assertEquals(2, results.size()); assertEquals(2, counter.getValue()); } + + public void testRemoveOrphanedProps() + { + final Serializable[] stringKey = new String[] { "z", "q", "string" }; + final Serializable[] doubleKey = new String[] { "z", "q", "double" }; + final Serializable[] dateKey = new String[] { "z", "q", "date" }; + + // Make sure there's nothing left from previous failed test runs etc. + attributeService.removeAttributes(stringKey); + attributeService.removeAttributes(doubleKey); + attributeService.removeAttributes(dateKey); + + final PropValGenerator valueGen = new PropValGenerator(propertyValueDAO); + + // Create some values + final String stringValue = valueGen.createUniqueString(); + attributeService.createAttribute(stringValue, stringKey); + + final Double doubleValue = valueGen.createUniqueDouble(); + attributeService.createAttribute(doubleValue, doubleKey); + + final Date dateValue = valueGen.createUniqueDate(); + attributeService.createAttribute(dateValue, dateKey); + + // Remove the properties, potentially leaving oprhaned prop values. + attributeService.removeAttributes(stringKey); + attributeService.removeAttributes(doubleKey); + attributeService.removeAttributes(dateKey); + + // Check there are some persisted values to delete, otherwise there is no + // need to run the cleanup script in the first place. + assertEquals(stringValue, propertyValueDAO.getPropertyValue(stringValue).getSecond()); + assertEquals(doubleValue, propertyValueDAO.getPropertyValue(doubleValue).getSecond()); + assertEquals(dateValue, propertyValueDAO.getPropertyValue(dateValue).getSecond()); + + // Run the cleanup script - should remove the orphaned values. + propertyValueDAO.cleanupUnusedValues(); + + // Check that the cleanup script removed the orphaned values. + assertPropDeleted(propertyValueDAO.getPropertyValue(stringValue)); + assertPropDeleted(propertyValueDAO.getPropertyValue(doubleValue)); + assertPropDeleted(propertyValueDAO.getPropertyValue(dateValue)); + } + + private void assertPropDeleted(Pair value) + { + if (value != null) + { + String msg = String.format("Property value [%s=%s] should have been deleted by cleanup script.", + value.getSecond().getClass().getSimpleName(), value.getSecond()); + fail(msg); + } + } } diff --git a/source/test-java/org/alfresco/repo/attributes/PropTablesCleanupJobIntegrationTest.java b/source/test-java/org/alfresco/repo/attributes/PropTablesCleanupJobIntegrationTest.java new file mode 100644 index 0000000000..6a3d1d1283 --- /dev/null +++ b/source/test-java/org/alfresco/repo/attributes/PropTablesCleanupJobIntegrationTest.java @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.attributes; + +import static org.junit.Assert.*; + +import java.util.Date; + +import org.alfresco.repo.domain.propval.PropertyValueDAO; +import org.alfresco.util.ApplicationContextHelper; +import org.alfresco.util.CronTriggerBean; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.quartz.JobDetail; +import org.quartz.SchedulerException; +import org.springframework.context.ApplicationContext; + +import com.ibm.icu.util.Calendar; + +/** + * Integration tests for the {@link PropTablesCleanupJob} class. + * + * @author Matt Ward + */ +public class PropTablesCleanupJobIntegrationTest +{ + private static ApplicationContext ctx; + private CronTriggerBean jobTrigger; + + @BeforeClass + public static void setUpClass() + { + ctx = ApplicationContextHelper.getApplicationContext(); + } + + @Before + public void setUp() throws Exception + { + jobTrigger = ctx.getBean("propTablesCleanupTrigger", CronTriggerBean.class); + } + + @Test + public void checkJobWillNeverRunByDefault() throws Exception + { + Date fireTime = jobTrigger.getTrigger().getFireTimeAfter(new Date()); + Calendar calendar = Calendar.getInstance(); + + // Far into the future, we count this as never. + calendar.setTime(fireTime); + assertEquals(2099, calendar.get(Calendar.YEAR)); + + } + + @Test + public void checkJobDetails() + { + JobDetail jobDetail = jobTrigger.getJobDetail(); + assertEquals(PropTablesCleanupJob.class, jobDetail.getJobClass()); + assertTrue("JobDetail did not contain PropertyValueDAO reference", + jobDetail.getJobDataMap().get("propertyValueDAO") instanceof PropertyValueDAO); + } +} diff --git a/source/test-java/org/alfresco/repo/attributes/PropTablesCleanupJobTest.java b/source/test-java/org/alfresco/repo/attributes/PropTablesCleanupJobTest.java new file mode 100644 index 0000000000..7ec2aa1671 --- /dev/null +++ b/source/test-java/org/alfresco/repo/attributes/PropTablesCleanupJobTest.java @@ -0,0 +1,63 @@ +package org.alfresco.repo.attributes; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import org.alfresco.repo.domain.propval.PropertyValueDAO; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.quartz.JobDetail; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; + + +/** + * Tests for the {@link PropTablesCleanupJob} class. + * + * @author Matt Ward + */ +@RunWith(MockitoJUnitRunner.class) +public class PropTablesCleanupJobTest +{ + private PropTablesCleanupJob cleanupJob; + private @Mock JobExecutionContext jobCtx; + private @Mock PropertyValueDAO propValueDAO; + private JobDetail jobDetail; + + @Before + public void setUp() throws Exception + { + jobDetail = new JobDetail("propTablesCleanupJob", PropTablesCleanupJob.class); + jobDetail.getJobDataMap().put(PropTablesCleanupJob.PROPERTY_VALUE_DAO_KEY, propValueDAO); + cleanupJob = new PropTablesCleanupJob(); + + when(jobCtx.getJobDetail()).thenReturn(jobDetail); + } + + @Test + public void testExecute() throws JobExecutionException + { + cleanupJob.execute(jobCtx); + + verify(propValueDAO).cleanupUnusedValues(); + } + + @Test(expected=IllegalArgumentException.class) + public void testMissingPropertyValueDAO() throws JobExecutionException + { + jobDetail.getJobDataMap().put(PropTablesCleanupJob.PROPERTY_VALUE_DAO_KEY, null); + cleanupJob.execute(jobCtx); + } + + @Test(expected=ClassCastException.class) + public void testWrongTypeForPropertyValueDAO() throws JobExecutionException + { + jobDetail.getJobDataMap().put(PropTablesCleanupJob.PROPERTY_VALUE_DAO_KEY, "This is not a PropertyValueDAO"); + cleanupJob.execute(jobCtx); + } + +} diff --git a/source/test-java/org/alfresco/repo/domain/audit/AuditDAOTest.java b/source/test-java/org/alfresco/repo/domain/audit/AuditDAOTest.java index bf66dd76b2..32e587a5ac 100644 --- a/source/test-java/org/alfresco/repo/domain/audit/AuditDAOTest.java +++ b/source/test-java/org/alfresco/repo/domain/audit/AuditDAOTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2005-2010 Alfresco Software Limited. + * Copyright (C) 2005-2014 Alfresco Software Limited. * * This file is part of Alfresco * @@ -23,16 +23,22 @@ import java.io.IOException; import java.io.Serializable; import java.net.URL; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.LinkedList; +import java.util.List; import java.util.Map; +import javax.transaction.UserTransaction; + import junit.framework.TestCase; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.repo.content.transform.AbstractContentTransformerTest; import org.alfresco.repo.domain.audit.AuditDAO.AuditApplicationInfo; import org.alfresco.repo.domain.contentdata.ContentDataDAO; +import org.alfresco.repo.domain.propval.PropValGenerator; +import org.alfresco.repo.domain.propval.PropertyValueDAO; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.service.ServiceRegistry; @@ -62,6 +68,7 @@ public class AuditDAOTest extends TestCase private TransactionService transactionService; private RetryingTransactionHelper txnHelper; private AuditDAO auditDAO; + private PropertyValueDAO propertyValueDAO; @Override public void setUp() throws Exception @@ -71,6 +78,7 @@ public class AuditDAOTest extends TestCase txnHelper = transactionService.getRetryingTransactionHelper(); auditDAO = (AuditDAO) ctx.getBean("auditDAO"); + propertyValueDAO = ctx.getBean(PropertyValueDAO.class); } public void testAuditModel() throws Exception @@ -527,4 +535,168 @@ public class AuditDAOTest extends TestCase return 0; } } + + + /** + * MNT-10067: use a script to delete the orphaned audit data (property values). + */ + public void testScriptCanDeleteOrphanedProps() throws Exception + { + // single test + scriptCanDeleteOrphanedPropsWork(false); + } + + private void scriptCanDeleteOrphanedPropsWork(final boolean performance) throws Exception + { + final int iterationStep, maxIterations; + if (performance) + { + iterationStep = 1000; + maxIterations = 1000; + } + else + { + iterationStep = 1; + maxIterations = 1; + } + + UserTransaction txn; + + for (int i = iterationStep; i <= maxIterations*iterationStep; i+=iterationStep) + { + List stringValues = new LinkedList(); + List doubleValues = new LinkedList(); + List dateValues = new LinkedList(); + + txn = transactionService.getUserTransaction(); + long startCreate = System.currentTimeMillis(); + txn.begin(); + for (int j = 0; j < i; j++) + { + PropValGenerator valueGen = new PropValGenerator(propertyValueDAO); + String stringValue = valueGen.createUniqueString(); + stringValues.add(stringValue); + Double doubleValue = valueGen.createUniqueDouble(); + doubleValues.add(doubleValue); + Date dateValue = valueGen.createUniqueDate(); + dateValues.add(dateValue); + + AuditQueryCallbackImpl preDeleteCallback = new AuditQueryCallbackImpl(); + AuditQueryCallbackImpl resultsCallback = new AuditQueryCallbackImpl(); + + AuditApplicationInfo info1 = createAuditApp(); + String app1 = info1.getName(); + + String username = "alexi"; + Map values = new HashMap(); + values.put("/a/b/string-" + j, stringValue); + values.put("/a/b/double-" + j, doubleValue); + values.put("/a/b/date-" + j, dateValue); + // TODO: how to deal with Serializable values which cannot be retrieved later in test by value alone? + long now = System.currentTimeMillis(); + auditDAO.createAuditEntry(info1.getId(), now, username, values); + + auditDAO.findAuditEntries(preDeleteCallback, new AuditQueryParameters(), -1); + assertEquals(1, preDeleteCallback.numEntries(app1)); + + // Delete audit entries between times - for all applications. + auditDAO.deleteAuditEntries(info1.getId(), null, null); + + if (!performance) + { + auditDAO.findAuditEntries(resultsCallback, new AuditQueryParameters(), -1); + assertEquals("All entries should have been deleted from app1", 0, resultsCallback.numEntries(app1)); + } + } + txn.commit(); + System.out.println("Created values for " + i + " entries in " + (System.currentTimeMillis() - startCreate) + " ms."); + + txn = transactionService.getUserTransaction(); + txn.begin(); + if (!performance) + { + // Check there are some persisted values to delete. + // Unlike PropertyValueDAOTest we're using the getPropertyValue() method here, + // instead of the datatype-specific methods (e.g. getPropertyStringValue()). + // This is because AuditDAO persists an entire map of values resulting in different behaviour + // (i.e. dates are persisted as Serializable) + for (String stringValue : stringValues) + { + assertEquals(stringValue, propertyValueDAO.getPropertyValue(stringValue).getSecond()); + } + for (Double doubleValue : doubleValues) + { + assertEquals(doubleValue, propertyValueDAO.getPropertyValue(doubleValue).getSecond()); + } + for (Date dateValue : dateValues) + { + assertEquals(dateValue, propertyValueDAO.getPropertyValue(dateValue).getSecond()); + } + } + long startDelete = System.currentTimeMillis(); + propertyValueDAO.cleanupUnusedValues(); + txn.commit(); + System.out.println("Cleaned values for " + i + " entries in " + (System.currentTimeMillis() - startDelete) + " ms."); + + if (!performance) + { + // Check all the properties have been deleted. + txn = transactionService.getUserTransaction(); + txn.begin(); + + for (String stringValue : stringValues) + { + assertPropDeleted(propertyValueDAO.getPropertyValue(stringValue)); + } + for (Double doubleValue : doubleValues) + { + assertPropDeleted(propertyValueDAO.getPropertyValue(doubleValue)); + } + for (Date dateValue : dateValues) + { + assertPropDeleted(propertyValueDAO.getPropertyValue(dateValue)); + } + + txn.commit(); + } + } + } + + private void assertPropDeleted(Pair value) + { + if (value != null) + { + String msg = String.format("Property value [%s=%s] should have been deleted by cleanup script.", + value.getSecond().getClass().getSimpleName(), value.getSecond()); + fail(msg); + } + } + + public void scriptCanDeleteOrphanedPropsPerformance() throws Exception + { + scriptCanDeleteOrphanedPropsWork(true); + } + + public static void main(String[] args) + { + try + { + AuditDAOTest test = new AuditDAOTest(); + test.setUp(); + System.out.println("Press any key to run performance test."); + System.in.read(); + test.scriptCanDeleteOrphanedPropsPerformance(); + System.out.println("Press any key to shutdown."); + System.in.read(); + test.tearDown(); + } + catch (Throwable e) + { + e.printStackTrace(); + } + finally + { + ApplicationContextHelper.closeApplicationContext(); + } + } } diff --git a/source/test-java/org/alfresco/repo/domain/propval/PropValGenerator.java b/source/test-java/org/alfresco/repo/domain/propval/PropValGenerator.java new file mode 100644 index 0000000000..797fca0112 --- /dev/null +++ b/source/test-java/org/alfresco/repo/domain/propval/PropValGenerator.java @@ -0,0 +1,186 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.propval; + +import static org.junit.Assert.assertNotNull; + +import java.io.Serializable; +import java.util.Date; +import java.util.Random; +import java.util.UUID; + +import org.alfresco.util.Pair; + +/** + * Creates property values suitable for use in testing. + * + * @author Matt Ward + */ +public class PropValGenerator +{ + private static final Random rand = new Random(); + private final PropertyValueDAO propertyValueDAO; + private final DoubleGen doubleGen = new DoubleGen(); + private final DateGen dateGen = new DateGen(); + private final SerializableGen serGen = new SerializableGen(); + + public PropValGenerator(PropertyValueDAO propertyValueDAO) + { + this.propertyValueDAO = propertyValueDAO; + } + + public String createUniqueString() + { + // No need to do anything more clever than create a UUID. + return UUID.randomUUID().toString(); + } + + public Double createUniqueDouble() + { + return doubleGen.getUnusedValue(); + } + + public Date createUniqueDate() + { + return dateGen.getUnusedValue(); + } + + public Serializable createUniqueSerializable() + { + return serGen.getUnusedValue(); + } + + + private class DoubleGen extends UniqueValueGenerator + { + @Override + protected Double createValue() + { + return (Math.pow(2,32)) + (rand.nextDouble() * (Math.pow(2,32) - Math.pow(2,31))); + } + + @Override + protected Pair getExistingValue(Double value) + { + return propertyValueDAO.getPropertyDoubleValue(value); + } + } + + private class DateGen extends UniqueValueGenerator + { + @Override + protected Date createValue() + { + Random rand = new Random(); + Date date = new Date(rand.nextLong()); + // Dates are stored to day precision, make sure we return the + // same value that will be stored, for comparison in assert statements etc. + Date truncDate = PropertyDateValueEntity.truncateDate(date); + return truncDate; + } + + @Override + protected Pair getExistingValue(Date value) + { + return propertyValueDAO.getPropertyDateValue(value); + } + } + + private class SerializableGen extends UniqueValueGenerator + { + @Override + protected Serializable createValue() + { + return new MySerializable(); + } + + @Override + protected Pair getExistingValue(Serializable value) + { + return propertyValueDAO.getPropertyValue(value); + } + } + + private static class MySerializable implements Serializable + { + private static final long serialVersionUID = 1L; + private final Long val; + + public MySerializable() + { + val = rand.nextLong(); + } + + @Override + public int hashCode() + { + final int prime = 31; + int result = 1; + result = prime * result + ((this.val == null) ? 0 : this.val.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) + { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + MySerializable other = (MySerializable) obj; + if (this.val == null) + { + if (other.val != null) return false; + } + else if (!this.val.equals(other.val)) return false; + return true; + } + } + + /** + * Generate values that aren't currently in the properties tables. By trying random values + * several times until an unused value is used. This is to help avoid red builds, since the + * assumption by the orphaned property cleanup test is that the properties are not in use + * (otherwise they won't be cleaned up!) + */ + private abstract class UniqueValueGenerator + { + private final int maxTries = 5; + protected abstract T createValue(); + protected abstract Pair getExistingValue(T value); + + public T getUnusedValue() + { + int tries = 0; + T value = null; + boolean exists = true; + while (exists) + { + if (++tries > maxTries) + { + throw new RuntimeException("Unable to generate unused value in " + maxTries + " tries."); + } + value = createValue(); + assertNotNull("Value generator should not generate a null value, but did.", value); + // Make sure the value isn't already present in the properties tables. + exists = (getExistingValue(value) != null); + } + return value; + } + } +} diff --git a/source/test-java/org/alfresco/repo/domain/propval/PropertyValueDAOTest.java b/source/test-java/org/alfresco/repo/domain/propval/PropertyValueDAOTest.java index 8389d316a7..55e0b25c81 100644 --- a/source/test-java/org/alfresco/repo/domain/propval/PropertyValueDAOTest.java +++ b/source/test-java/org/alfresco/repo/domain/propval/PropertyValueDAOTest.java @@ -44,6 +44,7 @@ import org.alfresco.util.GUID; import org.alfresco.util.Pair; import org.junit.experimental.categories.Category; import org.springframework.context.ApplicationContext; +import org.springframework.dao.DataIntegrityViolationException; import org.springframework.extensions.surf.util.ISO8601DateFormat; /** @@ -874,4 +875,84 @@ public class PropertyValueDAOTest extends TestCase }, false); assertEquals("ID-value pair incorrect", v2Pair, pair); } + + /** + * MNT-10067: use a script to delete the orphaned property values. + */ + public void testScriptCanDeleteUnusedProps() + { + PropValGenerator valueGen = new PropValGenerator(propertyValueDAO); + + // Find some values to use in the test that aren't already in the property tables. + final String stringValue = valueGen.createUniqueString(); + final Double doubleValue = valueGen.createUniqueDouble(); + final Date dateValue = valueGen.createUniqueDate(); + final Serializable serValue = valueGen.createUniqueSerializable(); + // We'll keep a list of the DB IDs of the persisted values so we can later check they've been deleted. + final Map persistedIDs = new HashMap(); + + txnHelper.doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + persistedIDs.put(stringValue, propertyValueDAO.getOrCreatePropertyStringValue(stringValue).getFirst()); + persistedIDs.put(doubleValue, propertyValueDAO.getOrCreatePropertyDoubleValue(doubleValue).getFirst()); + persistedIDs.put(dateValue, propertyValueDAO.getOrCreatePropertyDateValue(dateValue).getFirst()); + persistedIDs.put(serValue, propertyValueDAO.getOrCreatePropertyValue(serValue).getFirst()); + return null; + } + }); + + // Run the clean-up script. + txnHelper.doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + // Check there are some persisted values to delete. + assertEquals(stringValue, propertyValueDAO.getPropertyStringValue(stringValue).getSecond()); + assertEquals(doubleValue, propertyValueDAO.getPropertyDoubleValue(doubleValue).getSecond()); + assertEquals(dateValue, propertyValueDAO.getPropertyDateValue(dateValue).getSecond()); + // Serializable values are the odd-one-out; we can't query for them by value + // and no de-duplication is used during storage. + assertEquals(serValue, propertyValueDAO.getPropertyValueById(persistedIDs.get(serValue)).getSecond()); + + propertyValueDAO.cleanupUnusedValues(); + return null; + } + }); + + // Check all the properties have been deleted. + txnHelper.doInTransaction(new RetryingTransactionCallback() + { + public Void execute() throws Throwable + { + assertPropDeleted(propertyValueDAO.getPropertyStringValue(stringValue)); + assertPropDeleted(propertyValueDAO.getPropertyDoubleValue(doubleValue)); + // TODO: fix date deletion, not currently handled by CleanAlfPropTables.sql +// assertPropDeleted(propertyValueDAO.getPropertyDateValue(dateValue)); + // Serializable values cannot be queried by value + try + { + propertyValueDAO.getPropertyValueById(persistedIDs.get(serValue)); + fail(String.format("Persisted %s was not deleted, but should have been.", + serValue.getClass().getSimpleName())); + } + catch (DataIntegrityViolationException e) + { + // Good - it was deleted. + } + return null; + } + }); + } + + private void assertPropDeleted(Pair value) + { + if (value != null) + { + String msg = String.format("Property value [%s=%s] should have been deleted by cleanup script.", + value.getSecond().getClass().getSimpleName(), value.getSecond()); + fail(msg); + } + } } diff --git a/source/test-java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImplIntegrationTest.java b/source/test-java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImplIntegrationTest.java new file mode 100644 index 0000000000..08b0ecd551 --- /dev/null +++ b/source/test-java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImplIntegrationTest.java @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.schema.script; + +import static org.junit.Assert.assertEquals; + +import java.util.List; + +import javax.sql.DataSource; + +import org.alfresco.util.ApplicationContextHelper; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.context.ApplicationContext; +import org.springframework.jdbc.core.JdbcTemplate; + +/** + * Integration tests for the {@link ScriptBundleExecutorImpl} class. + * + * @author Matt Ward + */ +public class ScriptBundleExecutorImplIntegrationTest +{ + private static ApplicationContext ctx; + private ScriptBundleExecutor bundleExecutor; + private DataSource dataSource; + private JdbcTemplate jdbcTmpl; + + @BeforeClass + public static void setUpBeforeClass() throws Exception + { + String[] config = new String[] { + "classpath:alfresco/application-context.xml", + "classpath:scriptexec/script-exec-test.xml" + }; + ctx = ApplicationContextHelper.getApplicationContext(config); + } + + @Before + public void setUp() throws Exception + { + bundleExecutor = ctx.getBean("bundleExecutor", ScriptBundleExecutorImpl.class); + dataSource = ctx.getBean("dataSource", DataSource.class); + jdbcTmpl = new JdbcTemplate(dataSource); + } + + @Test + public void canExecuteBundle() + { + bundleExecutor.exec("scriptexec/${db.script.dialect}/bundle", "script_a.sql", "script_b.sql", "script_c.sql"); + + String select = "select message from alf_test_bundle order by message asc"; + List res = jdbcTmpl.queryForList(select, String.class); + assertEquals(2, res.size()); + // script_c deleted "script_a message 1" + assertEquals("script_a message 2", res.get(0)); + assertEquals("script_b", res.get(1)); + } + + @Test + public void postScriptIsRunFinallyEvenAfterEarlierFailure() + { + // script_b.sql will fail + bundleExecutor.execWithPostScript("scriptexec/${db.script.dialect}/bundle2", + "post_script.sql", "script_a.sql", "script_b.sql"); + + String select = "select message from alf_test_bundle2 order by message asc"; + List res = jdbcTmpl.queryForList(select, String.class); + assertEquals(1, res.size()); + // post_script deleted "script_a message 1" + assertEquals("script_a message 2", res.get(0)); + } +} diff --git a/source/test-java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImplTest.java b/source/test-java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImplTest.java new file mode 100644 index 0000000000..5ddc52037f --- /dev/null +++ b/source/test-java/org/alfresco/repo/domain/schema/script/ScriptBundleExecutorImplTest.java @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2005-2014 Alfresco Software Limited. + * + * This file is part of Alfresco + * + * Alfresco is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * Alfresco is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.schema.script; + +import static org.junit.Assert.*; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.same; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import org.apache.commons.logging.Log; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InOrder; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.runners.MockitoJUnitRunner; + +/** + * Unit tests for the {@link ScriptBundleExecutorImpl} class. + * + * @author Matt Ward + */ +@RunWith(MockitoJUnitRunner.class) +public class ScriptBundleExecutorImplTest +{ + // Class under test + private ScriptBundleExecutorImpl bundleExecutor; + private @Mock ScriptExecutor scriptExecutor; + private @Mock Log log; + + @BeforeClass + public static void setUpClass() throws Exception + { + } + + @Before + public void setUp() throws Exception + { + bundleExecutor = new ScriptBundleExecutorImpl(scriptExecutor); + bundleExecutor.log = log; + } + + @Test + public void canExecuteMultipleScripts() throws Exception + { + bundleExecutor.exec("/path/to/script/dir", "one.sql", "two.sql", "three.sql"); + + InOrder inOrder = Mockito.inOrder(scriptExecutor); + inOrder.verify(scriptExecutor).executeScriptUrl("/path/to/script/dir/one.sql"); + inOrder.verify(scriptExecutor).executeScriptUrl("/path/to/script/dir/two.sql"); + inOrder.verify(scriptExecutor).executeScriptUrl("/path/to/script/dir/three.sql"); + } + + @Test + public void willAlwaysRunPostBatchScript() throws Exception + { + // The first of the "main" scripts will fail... + Exception e = new RuntimeException("Script failure!"); + doThrow(e).when(scriptExecutor).executeScriptUrl("/path/to/script/dir/work01.sql"); + + bundleExecutor.execWithPostScript("/path/to/script/dir", "post.sql", "pre.sql", "work01.sql", "work02.sql"); + + InOrder inOrder = Mockito.inOrder(scriptExecutor); + inOrder.verify(scriptExecutor).executeScriptUrl("/path/to/script/dir/pre.sql"); + inOrder.verify(scriptExecutor).executeScriptUrl("/path/to/script/dir/work01.sql"); + // work02.sql will NOT be executed, but the post-script will be. + inOrder.verify(scriptExecutor, never()).executeScriptUrl("/path/to/script/dir/work02.sql"); + inOrder.verify(scriptExecutor).executeScriptUrl("/path/to/script/dir/post.sql"); + + verify(log).error(anyString(), same(e)); + } +} diff --git a/source/test-java/org/alfresco/repo/domain/schema/script/ScriptExecutorImplIntegrationTest.java b/source/test-java/org/alfresco/repo/domain/schema/script/ScriptExecutorImplIntegrationTest.java new file mode 100644 index 0000000000..25cb1b28d3 --- /dev/null +++ b/source/test-java/org/alfresco/repo/domain/schema/script/ScriptExecutorImplIntegrationTest.java @@ -0,0 +1,158 @@ +package org.alfresco.repo.domain.schema.script; + +import static org.junit.Assert.assertEquals; + +import java.util.List; + +import javax.sql.DataSource; + +import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.util.ApplicationContextHelper; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.dialect.Dialect; +import org.hibernate.dialect.MySQLInnoDBDialect; +import org.hibernate.dialect.PostgreSQLDialect; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.context.ApplicationContext; +import org.springframework.jdbc.core.JdbcTemplate; + +/** + * Integration tests for the {@link ScriptExecutorImpl} class. + * + * @author Matt Ward + */ +public class ScriptExecutorImplIntegrationTest +{ + private final static Log log = LogFactory.getLog(ScriptExecutorImplIntegrationTest.class); + private static ApplicationContext ctx; + private ScriptExecutor scriptExecutor; + private DataSource dataSource; + private JdbcTemplate jdbcTmpl; + private Dialect dialect; + + @BeforeClass + public static void setUpBeforeClass() throws Exception + { + String[] config = new String[] { + "classpath:alfresco/application-context.xml", + "classpath:scriptexec/script-exec-test.xml" + }; + ctx = ApplicationContextHelper.getApplicationContext(config); + } + + @Before + public void setUp() throws Exception + { + scriptExecutor = ctx.getBean("simpleScriptExecutor", ScriptExecutorImpl.class); + dataSource = ctx.getBean("dataSource", DataSource.class); + dialect = ctx.getBean("dialect", Dialect.class); + jdbcTmpl = new JdbcTemplate(dataSource); + } + + /** + * Check that we can execute a simple script, without any dialect-specific loading. + * + * @throws Exception + */ + @Test + public void canExecuteBasicScript() throws Exception + { + scriptExecutor.executeScriptUrl("scriptexec/basic.sql"); + + String select = "select textfield from alf_test_script_exec order by textfield asc"; + List res = jdbcTmpl.queryForList(select, String.class); + assertEquals(2, res.size()); + assertEquals("hello", res.get(0)); + assertEquals("world", res.get(1)); + } + + /** + * Check that a script designed to be run for all varieties of DBMS + * (i.e. in subdirectory org.hibernate.dialect.Dialect) will run + * regardless of specific dialect (e.g. MySQL or PostgreSQL) + * + * @throws Exception + */ + @Test + public void canExecuteGenericDialectScript() throws Exception + { + scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/generic.sql"); + + String select = "select message from alf_test_script_exec_generic"; + List res = jdbcTmpl.queryForList(select, String.class); + assertEquals(1, res.size()); + assertEquals("generic", res.get(0)); + } + + /** + * Test the case of executing a specific (e.g. PostgreSQL) database script + * when no general script is present (therefore no overriding mechanism is required). + * + * @throws Exception + */ + @Test + public void canExecuteSpecificDialectScript() throws Exception + { + scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/specific.sql"); + + String select = "select message from alf_test_script_exec_specific"; + List res = jdbcTmpl.queryForList(select, String.class); + assertEquals(1, res.size()); + if (dialect.getClass().equals(MySQLInnoDBDialect.class)) + { + assertEquals("mysql", res.get(0)); + } + else if (dialect.getClass().equals(PostgreSQLDialect.class)) + { + assertEquals("postgresql", res.get(0)); + } + else + { + log.warn("No suitable dialect-specific DB script for test canExecuteSpecificDialectScript()"); + } + } + + /** + * Test the case of executing a specific database script (e.g. PostgreSQL) when + * a more generic script also exists -- the more generic script is not run. + * + * @throws Exception + */ + @Test + public void canExecuteSpecificDialectOverridingGenericScript() throws Exception + { + scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/override.sql"); + + String select = "select message from alf_test_script_exec_override"; + List res = jdbcTmpl.queryForList(select, String.class); + assertEquals(1, res.size()); + if (dialect.getClass().equals(MySQLInnoDBDialect.class)) + { + assertEquals("mysql", res.get(0)); + } + else if (dialect.getClass().equals(PostgreSQLDialect.class)) + { + assertEquals("postgresql", res.get(0)); + } + else + { + log.warn("No suitable dialect-specific DB script for test canExecuteSpecificDialectOverridingGenericScript()"); + } + } + + @Test() + public void exceptionThrownWhenNoMatchingScriptFound() throws Exception + { + try + { + scriptExecutor.executeScriptUrl("scriptexec/${db.script.dialect}/non-existent-file.sql"); + } + catch (AlfrescoRuntimeException e) + { + assertEquals("schema.update.err.script_not_found", e.getMsgId()); + } + } +} diff --git a/source/test-resources/scriptexec/basic.sql b/source/test-resources/scriptexec/basic.sql new file mode 100644 index 0000000000..89cbdd2292 --- /dev/null +++ b/source/test-resources/scriptexec/basic.sql @@ -0,0 +1,12 @@ +-- Simple test of the script executor functionality. + + +drop table alf_test_script_exec; --(optional) + +create table alf_test_script_exec +( + textfield VARCHAR(255) +); + +insert into alf_test_script_exec (textfield) values ('hello'); +insert into alf_test_script_exec (textfield) values ('world'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_a.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_a.sql new file mode 100644 index 0000000000..506df1b644 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_a.sql @@ -0,0 +1,9 @@ +drop table alf_test_bundle; --(optional) + +create table alf_test_bundle +( + message VARCHAR(255) +); + +insert into alf_test_bundle (message) values ('script_a message 1'); +insert into alf_test_bundle (message) values ('script_a message 2'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_b.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_b.sql new file mode 100644 index 0000000000..1dd176d863 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_b.sql @@ -0,0 +1 @@ +insert into alf_test_bundle (message) values ('script_b'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_c.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_c.sql new file mode 100644 index 0000000000..d01667d20a --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle/script_c.sql @@ -0,0 +1 @@ +delete from alf_test_bundle where message = 'script_a message 1'; diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/post_script.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/post_script.sql new file mode 100644 index 0000000000..f9c4ce372d --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/post_script.sql @@ -0,0 +1 @@ +delete from alf_test_bundle2 where message = 'script_a message 1'; diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/script_a.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/script_a.sql new file mode 100644 index 0000000000..78c91fe58b --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/script_a.sql @@ -0,0 +1,9 @@ +drop table alf_test_bundle2; --(optional) + +create table alf_test_bundle2 +( + message VARCHAR(255) +); + +insert into alf_test_bundle2 (message) values ('script_a message 1'); +insert into alf_test_bundle2 (message) values ('script_a message 2'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/script_b.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/script_b.sql new file mode 100644 index 0000000000..33e2fcf8a9 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/bundle2/script_b.sql @@ -0,0 +1,2 @@ +-- Note the deliberate typo - this script MUST fail! +insert int alf_test_bundle2 (message) values ('script_b'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/generic.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/generic.sql new file mode 100644 index 0000000000..6391b05f18 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/generic.sql @@ -0,0 +1,8 @@ +drop table alf_test_script_exec_generic; --(optional) + +create table alf_test_script_exec_generic +( + message VARCHAR(255) +); + +insert into alf_test_script_exec_generic (message) values ('generic'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/override.sql b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/override.sql new file mode 100644 index 0000000000..80bf5d9d97 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.Dialect/override.sql @@ -0,0 +1,8 @@ +drop table alf_test_script_exec_override; --(optional) + +create table alf_test_script_exec_override +( + message VARCHAR(255) +); + +insert into alf_test_script_exec_override (message) values ('FAILURE! script should not have been run.'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.MySQLInnoDBDialect/override.sql b/source/test-resources/scriptexec/org.hibernate.dialect.MySQLInnoDBDialect/override.sql new file mode 100644 index 0000000000..09214b2f29 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.MySQLInnoDBDialect/override.sql @@ -0,0 +1,8 @@ +drop table alf_test_script_exec_override; --(optional) + +create table alf_test_script_exec_override +( + message VARCHAR(255) +); + +insert into alf_test_script_exec_override (message) values ('mysql'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.MySQLInnoDBDialect/specific.sql b/source/test-resources/scriptexec/org.hibernate.dialect.MySQLInnoDBDialect/specific.sql new file mode 100644 index 0000000000..528bb11133 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.MySQLInnoDBDialect/specific.sql @@ -0,0 +1,8 @@ +drop table alf_test_script_exec_specific; --(optional) + +create table alf_test_script_exec_specific +( + message VARCHAR(255) +); + +insert into alf_test_script_exec_specific (message) values ('mysql'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.PostgreSQLDialect/override.sql b/source/test-resources/scriptexec/org.hibernate.dialect.PostgreSQLDialect/override.sql new file mode 100644 index 0000000000..cb63a7364d --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.PostgreSQLDialect/override.sql @@ -0,0 +1,8 @@ +drop table alf_test_script_exec_override; --(optional) + +create table alf_test_script_exec_override +( + message VARCHAR(255) +); + +insert into alf_test_script_exec_override (message) values ('postgresql'); diff --git a/source/test-resources/scriptexec/org.hibernate.dialect.PostgreSQLDialect/specific.sql b/source/test-resources/scriptexec/org.hibernate.dialect.PostgreSQLDialect/specific.sql new file mode 100644 index 0000000000..79bf9c7452 --- /dev/null +++ b/source/test-resources/scriptexec/org.hibernate.dialect.PostgreSQLDialect/specific.sql @@ -0,0 +1,8 @@ +drop table alf_test_script_exec_specific; --(optional) + +create table alf_test_script_exec_specific +( + message VARCHAR(255) +); + +insert into alf_test_script_exec_specific (message) values ('postgresql'); diff --git a/source/test-resources/scriptexec/script-exec-test.xml b/source/test-resources/scriptexec/script-exec-test.xml new file mode 100644 index 0000000000..42967f2668 --- /dev/null +++ b/source/test-resources/scriptexec/script-exec-test.xml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + +