/** Placeholder token in script URLs that is replaced by the configured Dialect
class name: ${db.script.dialect} */
private static final String PLACEHOLDER_DIALECT = "\\$\\{db\\.script\\.dialect\\}";
/** The global property containing the default batch size used by --FOREACH */
private static final String PROPERTY_DEFAULT_BATCH_SIZE = "system.upgrade.default.batchsize";
// I18N keys: informational messages emitted while executing schema scripts
private static final String MSG_DIALECT_USED = "schema.update.msg.dialect_used";
private static final String MSG_DATABASE_USED = "schema.update.msg.database_used";
private static final String MSG_BYPASSING_SCHEMA_UPDATE = "schema.update.msg.bypassing";
private static final String MSG_NORMALIZED_SCHEMA = "schema.update.msg.normalized_schema";
private static final String MSG_NO_CHANGES = "schema.update.msg.no_changes";
private static final String MSG_ALL_STATEMENTS = "schema.update.msg.all_statements";
private static final String MSG_EXECUTING_GENERATED_SCRIPT = "schema.update.msg.executing_generated_script";
private static final String MSG_EXECUTING_COPIED_SCRIPT = "schema.update.msg.executing_copied_script";
private static final String MSG_EXECUTING_STATEMENT = "schema.update.msg.executing_statement";
private static final String MSG_OPTIONAL_STATEMENT_FAILED = "schema.update.msg.optional_statement_failed";
// I18N keys: warnings about unsupported or development-only dialects
private static final String WARN_DIALECT_UNSUPPORTED = "schema.update.warn.dialect_unsupported";
private static final String WARN_DIALECT_HSQL = "schema.update.warn.dialect_hsql";
private static final String WARN_DIALECT_DERBY = "schema.update.warn.dialect_derby";
// I18N keys: fatal errors raised during schema bootstrap and script parsing
private static final String ERR_FORCED_STOP = "schema.update.err.forced_stop";
private static final String ERR_DIALECT_SHOULD_USE = "schema.update.err.dialect_should_use";
private static final String ERR_MULTIPLE_SCHEMAS = "schema.update.err.found_multiple";
private static final String ERR_PREVIOUS_FAILED_BOOTSTRAP = "schema.update.err.previous_failed";
private static final String ERR_STATEMENT_FAILED = "schema.update.err.statement_failed";
private static final String ERR_UPDATE_FAILED = "schema.update.err.update_failed";
private static final String ERR_VALIDATION_FAILED = "schema.update.err.validation_failed";
private static final String ERR_SCRIPT_NOT_RUN = "schema.update.err.update_script_not_run";
private static final String ERR_SCRIPT_NOT_FOUND = "schema.update.err.script_not_found";
private static final String ERR_STATEMENT_INCLUDE_BEFORE_SQL = "schema.update.err.statement_include_before_sql";
private static final String ERR_STATEMENT_VAR_ASSIGNMENT_BEFORE_SQL = "schema.update.err.statement_var_assignment_before_sql";
private static final String ERR_STATEMENT_VAR_ASSIGNMENT_FORMAT = "schema.update.err.statement_var_assignment_format";
private static final String ERR_STATEMENT_TERMINATOR = "schema.update.err.statement_terminator";
// I18N keys: schema comparison (validation) reporting
private static final String DEBUG_SCHEMA_COMP_NO_REF_FILE = "system.schema_comp.debug.no_ref_file";
private static final String INFO_SCHEMA_COMP_ALL_OK = "system.schema_comp.info.all_ok";
private static final String WARN_SCHEMA_COMP_PROBLEMS_FOUND = "system.schema_comp.warn.problems_found";
private static final String WARN_SCHEMA_COMP_PROBLEMS_FOUND_NO_FILE = "system.schema_comp.warn.problems_found_no_file";
private static final String DEBUG_SCHEMA_COMP_TIME_TAKEN = "system.schema_comp.debug.time_taken";
// Defaults for the bootstrap lock retry loop and the string-column length
public static final int DEFAULT_LOCK_RETRY_COUNT = 24;
public static final int DEFAULT_LOCK_RETRY_WAIT_SECONDS = 5;
public static final int DEFAULT_MAX_STRING_LENGTH = 1024;
// volatile: written once during bootstrap, read by any thread via getMaxStringLength()
private static volatile int maxStringLength = DEFAULT_MAX_STRING_LENGTH;
private Dialect dialect;
private ResourcePatternResolver rpr = new PathMatchingResourcePatternResolver(this.getClass().getClassLoader());
/**
* @see PropertyValue#DEFAULT_MAX_STRING_LENGTH
*/
private static final void setMaxStringLength(int length)
{
    // Guard against shrinking below the schema's minimum string-column capacity.
    // Use the named constant rather than a magic number so the check and the
    // default stay in sync.
    if (length < DEFAULT_MAX_STRING_LENGTH)
    {
        throw new AlfrescoRuntimeException(
                "The maximum string length must be >= " + DEFAULT_MAX_STRING_LENGTH + " characters.");
    }
    SchemaBootstrap.maxStringLength = length;
}
/**
* @return Returns the maximum number of characters that a string field can be
*/
public static final int getMaxStringLength()
{
// Volatile read: the value is set once during bootstrap and read from any thread
return SchemaBootstrap.maxStringLength;
}
/**
* Truncates or returns a string that will fit into the string columns in the schema. Text fields can
* either cope with arbitrarily long text fields or have the default limit, {@link #DEFAULT_MAX_STRING_LENGTH}.
*
* @param value the string to check
* @return Returns a string that is short enough for {@link SchemaBootstrap#getMaxStringLength()}
*
* @since 3.2
*/
public static final String trimStringForTextFields(String value)
{
    // Null values and values already within the configured limit pass through untouched
    if (value == null || value.length() <= maxStringLength)
    {
        return value;
    }
    // Otherwise truncate to the maximum number of characters the schema can store
    return value.substring(0, maxStringLength);
}
/**
* Provide a reference to the DescriptorService, used to provide information
* about the repository such as the database schema version number.
*
* @param descriptorService the descriptorService to set
*/
public void setDescriptorService(DescriptorService descriptorService)
{
// Simple Spring-injected collaborator; no validation required here
this.descriptorService = descriptorService;
}
/**
* Sets the previously auto-detected Hibernate dialect.
*
* @param dialect
* the dialect
*/
public void setDialect(Dialect dialect)
{
// The dialect is auto-detected elsewhere and injected here before bootstrap runs
this.dialect = dialect;
}
private static Log logger = LogFactory.getLog(SchemaBootstrap.class);
// Collaborators and configuration, injected via the Spring setters below
private DescriptorService descriptorService;
private DataSource dataSource;
private AppliedPatchDAO appliedPatchDAO;
private LocalSessionFactoryBean localSessionFactory;
// NOTE(review): "Ouput" is a typo, but renaming the field would break existing Spring wiring - left as-is
private String schemaOuputFilename;
private boolean updateSchema;
private boolean stopAfterSchemaBootstrap;
private List
* WARNING: USE FOR DEBUG AND UPGRADE TESTING ONLY
*
* @param stopAfterSchemaBootstrap true to terminate (with exception) after
* running all the usual schema updates and checks.
*/
public void setStopAfterSchemaBootstrap(boolean stopAfterSchemaBootstrap)
{
// Debug/upgrade-testing switch: when true the bootstrap deliberately terminates with an exception
this.stopAfterSchemaBootstrap = stopAfterSchemaBootstrap;
}
/**
* Specifies the schema reference files that will be used to validate the repository
* schema whenever changes have been made. The database dialect placeholder will be
* resolved so that the correct reference files are loaded for the current database
* type (e.g. PostgreSQL)
*
* @param schemaReferenceUrls the schemaReferenceUrls to set
* @see #PLACEHOLDER_DIALECT
*/
public void setSchemaReferenceUrls(List
* The system - as of V2.1.2 - will attempt to adjust the maximum string length size
* automatically and therefore this method is not normally required. But it is possible
* to manually override the value if, for example, the system doesn't guess the correct
* maximum length or if the dialect is not explicitly catered for.
*
* All negative or zero values are ignored and the system defaults to its best guess based
* on the dialect being used.
*
* @param maximumStringLength the maximum length of the string_value columns
*/
public void setMaximumStringLength(int maximumStringLength)
{
// Zero and negative values are deliberately ignored: the system then falls back
// to its dialect-based best guess (see the javadoc above)
if (maximumStringLength > 0)
{
this.maximumStringLength = maximumStringLength;
}
}
/**
* Get the limit for the hibernate executions queue
*/
public int getHibernateMaxExecutions()
{
// Delegates to global static state on ActionQueue rather than an instance field
return ActionQueue.getMAX_EXECUTIONS_SIZE();
}
/**
* Set the limit for the hibernate executions queue
* Less than zero always uses event amalgamation
*/
public void setHibernateMaxExecutions(int hibernateMaxExecutions)
{
// Mutates global static state on ActionQueue; affects all users of the queue
ActionQueue.setMAX_EXECUTIONS_SIZE(hibernateMaxExecutions);
}
/**
* Sets the properties map from which we look up some configuration settings.
*
* @param globalProperties
* the global properties
*/
public void setGlobalProperties(Properties globalProperties)
{
// Holds repository-wide configuration, e.g. the --FOREACH default batch size
this.globalProperties = globalProperties;
}
private SessionFactory getSessionFactory()
{
// LocalSessionFactoryBean is a Spring factory bean; getObject() yields the actual SessionFactory
return (SessionFactory) localSessionFactory.getObject();
}
/**
* Register a new script for execution when creating a clean schema. The order of registration
* determines the order of execution.
*
* @param preCreateScriptUrl the script URL, possibly containing the ${db.script.dialect} placeholder
*/
public void addPreCreateScriptUrl(String preCreateScriptUrl)
{
if (logger.isDebugEnabled())
{
logger.debug("Registered create script URL (pre-Hibernate): " + preCreateScriptUrl);
}
// Insertion order is preserved: registration order determines execution order
this.preCreateScriptUrls.add(preCreateScriptUrl);
}
/**
* Register a new script for execution after the Hibernate schema creation phase. The order of registration
* determines the order of execution.
*
* @param postCreateScriptUrl the script URL, possibly containing the ${db.script.dialect} placeholder
*/
public void addPostCreateScriptUrl(String postCreateScriptUrl)
{
    // Parameter renamed from 'postUpdateScriptUrl' to match the javadoc and the
    // method's purpose (post-CREATE, not post-update); binary-compatible change.
    if (logger.isDebugEnabled())
    {
        logger.debug("Registered create script URL (post-Hibernate): " + postCreateScriptUrl);
    }
    // Insertion order is preserved: registration order determines execution order
    this.postCreateScriptUrls.add(postCreateScriptUrl);
}
/**
* Register a new SQL-based patch for consideration against the instance (before Hibernate execution)
*
* @param scriptPatch the patch that will be examined for execution
*/
public void addPreUpdateScriptPatch(SchemaUpgradeScriptPatch scriptPatch)
{
    // Guard clause replaces the unidiomatic 'if (false == ...)' form:
    // ignored patches are logged and skipped entirely, never registered.
    if (scriptPatch.isIgnored())
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("Ignoring script patch (pre-Hibernate): " + scriptPatch.getId());
        }
        return;
    }
    if (logger.isDebugEnabled())
    {
        logger.debug("Registered script patch (pre-Hibernate): " + scriptPatch.getId());
    }
    this.preUpdateScriptPatches.add(scriptPatch);
}
/**
* Register a new SQL-based patch for consideration against the instance (after Hibernate execution)
*
* @param scriptPatch the patch that will be examined for execution
*/
public void addPostUpdateScriptPatch(SchemaUpgradeScriptPatch scriptPatch)
{
    // Guard clause replaces the unidiomatic 'if (false == ...)' form:
    // ignored patches are logged and skipped entirely, never registered.
    if (scriptPatch.isIgnored())
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("Ignoring script patch (post-Hibernate): " + scriptPatch.getId());
        }
        return;
    }
    if (logger.isDebugEnabled())
    {
        logger.debug("Registered script patch (post-Hibernate): " + scriptPatch.getId());
    }
    this.postUpdateScriptPatches.add(scriptPatch);
}
/**
* Register a new SQL-based patch for consideration against the Activiti instance
*
* @param scriptPatch the patch that will be examined for execution
*/
public void addUpdateActivitiScriptPatch(SchemaUpgradeScriptPatch scriptPatch)
{
    // Guard clause replaces the unidiomatic 'if (false == ...)' form:
    // ignored patches are logged and skipped entirely, never registered.
    if (scriptPatch.isIgnored())
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("Ignoring Activiti script patch: " + scriptPatch.getId());
        }
        return;
    }
    if (logger.isDebugEnabled())
    {
        logger.debug("Registered Activiti script patch: " + scriptPatch.getId());
    }
    this.updateActivitiScriptPatches.add(scriptPatch);
}
/** Marker exception: thrown when no applied-patch table exists, i.e. the schema has not been created yet. */
private static class NoSchemaException extends Exception
{
private static final long serialVersionUID = 5574280159910824660L;
}
/**
* Used to indicate a forced stop of the bootstrap.
*
* @see SchemaBootstrap#setStopAfterSchemaBootstrap(boolean)
*
* @author Derek Hulley
* @since 3.1.1
*/
private static class BootstrapStopException extends RuntimeException
{
private static final long serialVersionUID = 4250016675538442181L;
private BootstrapStopException()
{
// The message text is resolved from the I18N bundle via the well-known key
super(I18NUtil.getMessage(ERR_FORCED_STOP));
}
}
/**
* Count applied patches. This fails if multiple applied patch tables are found,
* which normally indicates that the schema view needs to be limited.
*
* @param cfg The Hibernate config
* @param connection a valid database connection
* @return Returns the number of applied patches
* @throws NoSchemaException if the table of applied patches can't be found
*/
private int countAppliedPatches(Configuration cfg, Connection connection) throws Exception
{
    // Empty schema/catalog names must be treated as "not specified" for the metadata lookup
    String defaultSchema = DatabaseMetaDataHelper.getSchema(connection);
    if (defaultSchema != null && defaultSchema.length() == 0)
    {
        defaultSchema = null;
    }
    String defaultCatalog = cfg.getProperty("hibernate.default_catalog");
    if (defaultCatalog != null && defaultCatalog.length() == 0)
    {
        defaultCatalog = null;
    }
    DatabaseMetaData dbMetadata = connection.getMetaData();

    ResultSet tableRs = dbMetadata.getTables(defaultCatalog, defaultSchema, "%", null);
    boolean newPatchTable = false;
    boolean oldPatchTable = false;
    try
    {
        boolean multipleSchemas = false;
        while (tableRs.next())
        {
            String tableName = tableRs.getString("TABLE_NAME");
            if (tableName.equalsIgnoreCase("applied_patch"))
            {
                if (oldPatchTable || newPatchTable)
                {
                    // Found earlier
                    multipleSchemas = true;
                }
                oldPatchTable = true;
            }
            else if (tableName.equalsIgnoreCase("alf_applied_patch"))
            {
                if (oldPatchTable || newPatchTable)
                {
                    // Found earlier
                    multipleSchemas = true;
                }
                newPatchTable = true;
            }
        }
        // We go through all the tables so that multiple visible schemas are detected
        if (multipleSchemas)
        {
            throw new AlfrescoRuntimeException(ERR_MULTIPLE_SCHEMAS);
        }
    }
    finally
    {
        // Quiet close, consistent with every other resource close in this class;
        // the previous printStackTrace() here was an anti-pattern.
        try { tableRs.close(); } catch (Throwable e) {}
    }

    if (newPatchTable)
    {
        Statement stmt = connection.createStatement();
        try
        {
            ResultSet rs = stmt.executeQuery("select count(id) from alf_applied_patch");
            rs.next();
            return rs.getInt(1);
        }
        catch (SQLException e)
        {
            // This should work at least and is probably an indication of the user viewing multiple schemas
            throw new AlfrescoRuntimeException(ERR_MULTIPLE_SCHEMAS);
        }
        finally
        {
            // Closing the statement also releases its result set
            try { stmt.close(); } catch (Throwable e) {}
        }
    }
    else if (oldPatchTable)
    {
        // found the old style table name
        Statement stmt = connection.createStatement();
        try
        {
            ResultSet rs = stmt.executeQuery("select count(id) from applied_patch");
            rs.next();
            return rs.getInt(1);
        }
        finally
        {
            try { stmt.close(); } catch (Throwable e) {}
        }
    }
    else
    {
        // The applied patches table is not present
        throw new NoSchemaException();
    }
}
/**
* Check whether Activiti tables already created in db.
*
* @param connection a valid database connection
* @return
* The connection that is used will not be closed or manipulated in any way. This class
* merely serves to give the connection to Hibernate.
*
* @author Derek Hulley
*/
public static class SchemaBootstrapConnectionProvider extends UserSuppliedConnectionProvider
{
private static ThreadLocaltrue
if Activiti tables already created in schema, otherwise false
*/
/**
 * Checks whether the Activiti engine tables are already present in the database
 * by probing the ACT_RU_TASK table.
 *
 * @param connection    a valid database connection
 * @return              <tt>true</tt> if the probe query succeeds, otherwise <tt>false</tt>
 */
private boolean checkActivitiTablesExist(Connection connection)
{
Statement stmt = null;
try
{
stmt = connection.createStatement();
// If this query runs without error, the Activiti schema exists
stmt.executeQuery("select min(id_) from ACT_RU_TASK");
return true;
}
catch (SQLException e)
{
// Any SQL failure here is taken to mean the table (and hence the schema) is absent
logger.debug("Did not find ACT_RU_TASK table.");
return false;
}
finally
{
try
{
if (stmt != null)
{
stmt.close();
}
}
catch (Throwable e) {}
}
}
/**
* @return Returns the name of the applied patch table, or null if the table doesn't exist
*/
private String getAppliedPatchTableName(Connection connection) throws Exception
{
Statement stmt = connection.createStatement();
try
{
// Probe for the current (post-1.4) table name first
stmt.executeQuery("select * from alf_applied_patch");
return "alf_applied_patch";
}
catch (Throwable e)
{
// we'll try another table name
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
// for pre-1.4 databases, the table was named differently
stmt = connection.createStatement();
try
{
stmt.executeQuery("select * from applied_patch");
return "applied_patch";
}
catch (Throwable e)
{
// It is not there
return null;
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
}
/**
* @return Returns whether the given patch was recorded as successfully applied
*         (and, if <tt>alternative</tt> is requested, actually executed)
*/
private boolean didPatchSucceed(Connection connection, String patchId, boolean alternative) throws Exception
{
    String patchTableName = getAppliedPatchTableName(connection);
    if (patchTableName == null)
    {
        // Table doesn't exist, yet
        return false;
    }
    // The table name comes from a fixed internal set (see getAppliedPatchTableName) and is safe
    // to concatenate; the patch ID, however, is bound as a parameter so that quotes or other
    // metacharacters in it cannot break the SQL.
    java.sql.PreparedStatement stmt = connection.prepareStatement(
            "select succeeded, was_executed from " + patchTableName + " where id = ?");
    try
    {
        stmt.setString(1, patchId);
        ResultSet rs = stmt.executeQuery();
        if (!rs.next())
        {
            // No record: the patch was never applied
            return false;
        }
        boolean succeeded = rs.getBoolean(1);
        boolean wasExecuted = rs.getBoolean(2);
        // 'alternative' additionally requires the patch to have actually been executed,
        // not merely marked as successful
        if (alternative)
        {
            return succeeded && wasExecuted;
        }
        else
        {
            return succeeded;
        }
    }
    finally
    {
        // Closing the statement also releases its result set
        try { stmt.close(); } catch (Throwable e) {}
    }
}
/**
* Finds the version.properties file and determines the installed version.schema.
* The only way to determine the original installed schema number is by quering the for the minimum value in
* alf_applied_patch.applied_to_schema. This might not work if an upgrade is attempted straight from
* Alfresco v1.0!
*
* @return the installed schema number or -1 if the installation is new.
*/
private int getInstalledSchemaNumber(Connection connection) throws Exception
{
    Statement stmt = connection.createStatement();
    try
    {
        ResultSet rs = stmt.executeQuery(
                "select min(applied_to_schema) from alf_applied_patch where applied_to_schema > -1");
        // No row, or a NULL aggregate, both mean nothing has been recorded: a new installation
        if (!rs.next() || rs.getObject(1) == null)
        {
            return -1;
        }
        // The minimum applied_to_schema is the schema number originally installed
        return rs.getInt(1);
    }
    finally
    {
        try { stmt.close(); } catch (Throwable e) {}
    }
}
/** Marker exception: the bootstrap lock table could not be created, i.e. another process holds the lock. */
private static class LockFailedException extends Exception
{
private static final long serialVersionUID = -6676398230191205456L;
}
/**
* Records that the bootstrap process has started
*/
private synchronized void setBootstrapStarted(Connection connection) throws Exception
{
// Create the marker table
Statement stmt = connection.createStatement();
try
{
// Creating the table acts as a database-level lock: only one process can succeed,
// because a second CREATE of the same table fails
stmt.executeUpdate("create table alf_bootstrap_lock (charval CHAR(1) NOT NULL)");
// Success
return;
}
catch (Throwable e)
{
// We throw a well-known exception to be handled by retrying code if required
throw new LockFailedException();
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
}
/**
* Records that the bootstrap process has finished
*/
private void setBootstrapCompleted(Connection connection) throws Exception
{
// Drop the marker table created by setBootstrapStarted, releasing the bootstrap lock
Statement stmt = connection.createStatement();
try
{
stmt.executeUpdate("drop table alf_bootstrap_lock");
// from Thor
executedStatementsThreadLocal.set(null);
}
catch (Throwable e)
{
// The drop failed: the lock table is in an unexpected state, which indicates a
// previously failed bootstrap
throw AlfrescoRuntimeException.create(ERR_PREVIOUS_FAILED_BOOTSTRAP);
}
finally
{
try { stmt.close(); } catch (Throwable e) {}
}
}
/**
* Builds the schema from scratch or applies the necessary patches to the schema.
*/
private boolean updateSchema(Configuration cfg, Session session, Connection connection) throws Exception
{
    boolean create = false;
    try
    {
        countAppliedPatches(cfg, connection);
    }
    catch (NoSchemaException e)
    {
        // No applied-patch table: this is a brand new database
        create = true;
    }
    // Get the dialect
    final Dialect dialect = Dialect.getDialect(cfg.getProperties());
    String dialectStr = dialect.getClass().getSimpleName();

    if (create)
    {
        long start = System.currentTimeMillis();
        // execute pre-create scripts (not patches)
        for (String scriptUrl : this.preCreateScriptUrls)
        {
            executeScriptUrl(cfg, connection, scriptUrl);
        }
        // Build and execute changes generated by Hibernate
        File tempFile = writeHibernateUpdateScript(cfg, connection, dialect, dialectStr);
        if (tempFile != null)
        {
            executeScriptFile(cfg, connection, tempFile, null);
        }
        // execute post-create scripts (not patches)
        for (String scriptUrl : this.postCreateScriptUrls)
        {
            executeScriptUrl(cfg, connection, scriptUrl);
        }
        if (logger.isInfoEnabled())
        {
            logger.info("Create scripts executed in "+(System.currentTimeMillis()-start)+" ms");
        }
    }
    else
    {
        // Execute any pre-auto-update scripts
        checkSchemaPatchScripts(cfg, connection, preUpdateScriptPatches, true);
        // Build and execute changes generated by Hibernate
        File tempFile = writeHibernateUpdateScript(cfg, connection, dialect, dialectStr);
        // execute if there were changes raised by Hibernate
        if (tempFile != null)
        {
            executeScriptFile(cfg, connection, tempFile, null);
        }
        // Execute any post-auto-update scripts
        checkSchemaPatchScripts(cfg, connection, postUpdateScriptPatches, true);
    }

    // Initialise Activiti DB, using an unclosable connection
    boolean activitiTablesExist = checkActivitiTablesExist(connection);
    if (!activitiTablesExist)
    {
        // Activiti DB updates are performed as patches in alfresco, only give
        // control to activiti when creating new one.
        initialiseActivitiDBSchema(new UnclosableConnection(connection));

        // ALF-18996: Upgrade from 3.4.12 to 4.2.0 fails: Activiti tables have not been bootstrapped
        // The Activiti bootstrap is effectively doing the work of all the other patches,
        // which should be considered complete.
        int installedSchemaNumber = getInstalledSchemaNumber(connection);
        for (Patch activitiScriptPatch : updateActivitiScriptPatches)
        {
            AppliedPatch appliedPatch = new AppliedPatch();
            appliedPatch.setId(activitiScriptPatch.getId());
            appliedPatch.setDescription(activitiScriptPatch.getDescription());
            appliedPatch.setFixesFromSchema(activitiScriptPatch.getFixesFromSchema());
            appliedPatch.setFixesToSchema(activitiScriptPatch.getFixesToSchema());
            appliedPatch.setTargetSchema(activitiScriptPatch.getTargetSchema());
            appliedPatch.setAppliedToSchema(installedSchemaNumber);
            appliedPatch.setAppliedToServer("UNKNOWN");
            appliedPatch.setAppliedOnDate(new Date()); // the date applied
            appliedPatch.setSucceeded(true);
            appliedPatch.setWasExecuted(false);
            appliedPatch.setReport("Placeholder for Activiti bootstrap at schema " + installedSchemaNumber);
            appliedPatchDAO.createAppliedPatch(appliedPatch);
        }
    }
    else
    {
        // Execute any auto-update scripts for Activiti tables
        checkSchemaPatchScripts(cfg, connection, updateActivitiScriptPatches, true);
        // verify that all Activiti patches have been applied correctly
        checkSchemaPatchScripts(cfg, connection, updateActivitiScriptPatches, false);
    }

    return create;
}

/**
 * Generates the Hibernate schema update statements for the current dialect and writes
 * them to a temporary SQL script, one statement per line terminated by ';'.
 * Extracted to remove the duplicated create/update code paths.
 *
 * @return the temp script file, or <tt>null</tt> if Hibernate raised no changes
 */
private File writeHibernateUpdateScript(Configuration cfg, Connection connection, Dialect dialect, String dialectStr) throws Exception
{
    DatabaseMetadata metadata = new DatabaseMetadata(connection, dialect);
    String[] sqls = cfg.generateSchemaUpdateScript(dialect, metadata);
    if (sqls.length == 0)
    {
        return null;
    }
    File tempFile = TempFileProvider.createTempFile("AlfrescoSchema-" + dialectStr + "-Update-", ".sql");
    // Write UTF-8 explicitly: executeScriptFile reads the script back as UTF-8, whereas the
    // previous FileWriter silently used the platform default charset.
    Writer writer = new BufferedWriter(
            new java.io.OutputStreamWriter(new java.io.FileOutputStream(tempFile), "UTF-8"));
    try
    {
        for (String sql : sqls)
        {
            writer.append(sql);
            writer.append(";\n");
        }
    }
    finally
    {
        try { writer.close(); } catch (Throwable e) {}
    }
    return tempFile;
}
/**
* Initialises the Activiti DB schema, if not present it's created.
*
* @param connection Connection to use the initialise DB schema
*/
private void initialiseActivitiDBSchema(Connection connection)
{
// create instance of activiti engine to initialise schema
ProcessEngine engine = null;
ProcessEngineConfiguration engineConfig = ProcessEngineConfiguration.createStandaloneProcessEngineConfiguration();
try
{
// build the engine
// NOTE: schema update is configured as "none" here; the create/upgrade is triggered
// explicitly via databaseSchemaUpgrade() below, against the supplied connection
engine = engineConfig.setDataSource(dataSource).
setDatabaseSchemaUpdate("none").
setProcessEngineName("activitiBootstrapEngine").
setHistory("full").
setJobExecutorActivate(false).
buildProcessEngine();
// create or upgrade the DB schema
engine.getManagementService().databaseSchemaUpgrade(connection, null, DatabaseMetaDataHelper.getSchema(connection));
}
finally
{
if (engine != null)
{
// close the process engine
engine.close();
}
}
}
/**
* Check that the necessary scripts have been executed against the database
*/
private void checkSchemaPatchScripts(
Configuration cfg,
Connection connection,
List
/**
* Replaces the dialect placeholder in a resource URL. For example,
* resolveDialectUrl(MySQLInnoDBDialect.class, "classpath:alfresco/db/${db.script.dialect}/myfile.xml")
* would give the following String:
* classpath:alfresco/db/org.hibernate.dialect.MySQLInnoDBDialect/myfile.xml
*/
private String resolveDialectUrl(Class> dialectClass, String resourceUrl)
{
return resourceUrl.replaceAll(PLACEHOLDER_DIALECT, dialectClass.getName());
}
/**
* Replaces the dialect placeholder in the script URL and attempts to find a file for
* it. If not found, the dialect hierarchy will be walked until a compatible script is
* found. This makes it possible to have scripts that are generic to all dialects.
*
* @return Returns an input stream onto the script, otherwise null
*/
private InputStream getScriptInputStream(Class> dialectClazz, String scriptUrl) throws Exception
{
return getDialectResource(dialectClazz, scriptUrl).getInputStream();
}
/**
* @param cfg the Hibernate configuration
* @param connection the DB connection to use
* @param scriptFile the file containing the statements
* @param scriptUrl the URL of the script to report. If this is null, the script
* is assumed to have been auto-generated.
*/
private void executeScriptFile(
Configuration cfg,
Connection connection,
File scriptFile,
String scriptUrl) throws Exception
{
final Dialect dialect = Dialect.getDialect(cfg.getProperties());
StringBuilder executedStatements = executedStatementsThreadLocal.get();
if (executedStatements == null)
{
// Validate the schema, pre-upgrade
validateSchema("Alfresco-{0}-Validation-Pre-Upgrade-{1}-", null);
dumpSchema("pre-upgrade");
// There is no lock at this stage. This process can fall out if the lock can't be applied.
setBootstrapStarted(connection);
executedStatements = new StringBuilder(8094);
executedStatementsThreadLocal.set(executedStatements);
}
if (scriptUrl == null)
{
LogUtil.info(logger, MSG_EXECUTING_GENERATED_SCRIPT, scriptFile);
}
else
{
LogUtil.info(logger, MSG_EXECUTING_COPIED_SCRIPT, scriptFile, scriptUrl);
}
InputStream scriptInputStream = new FileInputStream(scriptFile);
BufferedReader reader = new BufferedReader(new InputStreamReader(scriptInputStream, "UTF-8"));
try
{
int line = 0;
// loop through all statements
StringBuilder sb = new StringBuilder(1024);
String fetchVarName = null;
String fetchColumnName = null;
boolean doBatch = false;
int batchUpperLimit = 0;
int batchSize = 1;
MapreferenceResource
/**
* Validates the repository schema against the reference schema supplied in the
* <code>referenceResource</code> parameter.
* <p>
* The method supports two mechanisms to report validation results:
* <ul>
*   <li>an external <code>out</code> stream, supplied by the caller. The method only writes
*       report messages to it; it never flushes or closes it, so the caller must take care
*       of freeing that resource;</li>
*   <li>a temporary report file, named from the <code>outputFileNameTemplate</code> template,
*       which is created, written and closed by this method when <code>out</code> is null.</li>
* </ul>
*
* @param referenceResource - {@link Resource} instance, which determines file of reference schema
* @param outputFileNameTemplate - {@link String} value, which determines template of temporary filename for validation report. It can't be null
*        if <code>out</code> is null!
* @param out - {@link PrintWriter} instance, which represents an external output stream for writing a validation report. This stream is never closed or flushed. It can't be
*        null if <code>outputFileNameTemplate</code> is null!
* @return {@link Integer} value, which determines amount of errors or warnings that were detected during validation
*/
private int attemptValidateSchema(Resource referenceResource, String outputFileNameTemplate, PrintWriter out)
{
    Date startTime = new Date();

    InputStream is = null;
    try
    {
        is = new BufferedInputStream(referenceResource.getInputStream());
    }
    catch (IOException e)
    {
        // Preserve the cause so the underlying I/O failure is not lost
        throw new RuntimeException("Unable to open schema reference file: " + referenceResource, e);
    }

    // NOTE(review): 'is' is handed to XMLToSchema and never explicitly closed here -
    // presumably XMLToSchema closes it after parsing; confirm, otherwise this leaks a handle.
    XMLToSchema xmlToSchema = new XMLToSchema(is);
    xmlToSchema.parse();
    Schema reference = xmlToSchema.getSchema();

    ExportDb exporter = new ExportDb(dataSource, dialect, descriptorService);
    // Ensure that the database objects we're validating are filtered
    // by the same prefix as the reference file.
    exporter.setNamePrefix(reference.getDbPrefix());
    exporter.execute();
    Schema target = exporter.getSchema();

    SchemaComparator schemaComparator = new SchemaComparator(reference, target, dialect);
    schemaComparator.validateAndCompare();
    Results results = schemaComparator.getComparisonResults();

    Object[] outputFileNameParams = new Object[]
    {
            dialect.getClass().getSimpleName(),
            reference.getDbPrefix()
    };
    PrintWriter pw;
    File outputFile = null;
    if (out == null)
    {
        // No caller-supplied writer: report into a uniquely-named temp file
        String outputFileName = MessageFormat.format(outputFileNameTemplate, outputFileNameParams);
        outputFile = TempFileProvider.createTempFile(outputFileName, ".txt");
        try
        {
            pw = new PrintWriter(outputFile, SchemaComparator.CHAR_SET);
        }
        catch (FileNotFoundException error)
        {
            // Preserve the cause so the underlying failure is not lost
            throw new RuntimeException("Unable to open file for writing: " + outputFile, error);
        }
        catch (UnsupportedEncodingException error)
        {
            throw new RuntimeException("Unsupported char set: " + SchemaComparator.CHAR_SET, error);
        }
    }
    else
    {
        pw = out;
    }

    // Populate the file with details of the comparison's results.
    for (Result result : results)
    {
        pw.print(result.describe());
        pw.print(SchemaComparator.LINE_SEPARATOR);
    }

    // We care only about output streams for reporting, which are created specially for
    // the current reference resource; a caller-supplied writer is left open and unflushed.
    if (null == out)
    {
        pw.close();
    }

    if (results.size() == 0)
    {
        LogUtil.info(logger, INFO_SCHEMA_COMP_ALL_OK, referenceResource);
    }
    else
    {
        int numProblems = results.size();
        if (outputFile == null)
        {
            LogUtil.warn(logger, WARN_SCHEMA_COMP_PROBLEMS_FOUND_NO_FILE, numProblems);
        }
        else
        {
            LogUtil.warn(logger, WARN_SCHEMA_COMP_PROBLEMS_FOUND, numProblems, outputFile);
        }
    }
    Date endTime = new Date();
    long durationMillis = endTime.getTime() - startTime.getTime();
    LogUtil.debug(logger, DEBUG_SCHEMA_COMP_TIME_TAKEN, durationMillis);

    return results.size();
}
/**
* Produces schema dump in XML format: this is performed pre- and post-upgrade (i.e. if
* changes are made to the schema) and can made upon demand via JMX.
*
* @return List of output files.
*/
public List
* Alfresco-schema-DialectName-whenDumped-dbPrefix-23498732.xml
*
* Where the digits serve to create a unique temp file name. If whenDumped is empty or null,
* then the output is similar to:
*
* Alfresco-schema-DialectName-dbPrefix-23498732.xml
*
* If dbPrefixes is null, then the default list is used (see {@link MultiFileDumper#DEFAULT_PREFIXES})
* The dump files' paths are logged at info level.
*
* @param whenDumped
* @param dbPrefixes Array of database object prefixes to filter by, e.g. "alf_"
* @return List of output files.
*/
private List