ACS-1600 : Error when running propTablesCleanupJob on an env with 100 million records in alf_prop_value (#473)

- commit changes to the DB whenever `deleteBatchSize` deleted rows have accumulated (see the sketch after the diffs below)
Denis Ungureanu
2021-05-21 14:26:01 +03:00
committed by GitHub
parent 5092bf8a89
commit 00b0b21668
2 changed files with 1 addition and 2 deletions

DeleteNotExistsExecutor.java

@@ -220,7 +220,6 @@ public class DeleteNotExistsExecutor implements StatementExecutor
             {
                 // Process batch
                 primaryId = processPrimaryTableResultSet(primaryPrepStmt, secondaryPrepStmts, deletePrepStmt, deleteIds, primaryTableName, primaryColumnName, tableColumn);
-                connection.commit();

                 if (primaryId == null)
                 {
@@ -299,6 +298,7 @@ public class DeleteNotExistsExecutor implements StatementExecutor
             if (deleteIds.size() == deleteBatchSize)
             {
                 deleteFromPrimaryTable(deletePrepStmt, deleteIds, primaryTableName);
+                connection.commit();
             }

             if (!resultSet.next())

MySQLDeleteNotExistsExecutor.java

@@ -126,7 +126,6 @@ public class MySQLDeleteNotExistsExecutor extends DeleteNotExistsExecutor
             {
                 // Process batch
                 primaryId = processPrimaryTableResultSet(primaryPrepStmt, secondaryPrepStmts, deletePrepStmt, deleteIds, primaryTableName, primaryColumnName, tableColumn);
-                connection.commit();

                 if (primaryId == null)
                 {
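
The net effect of the change is that the cleanup job no longer commits after processing each fetched batch of the primary table, but instead commits whenever `deleteBatchSize` rows have actually been deleted. The following is a minimal sketch of that batched-delete-then-commit pattern; the class name, connection details, and the `findObsoleteIds` helper are hypothetical stand-ins for illustration, not the Alfresco `DeleteNotExistsExecutor` implementation.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class BatchedDeleteSketch
{
    // Hypothetical batch size; the real job reads deleteBatchSize from configuration.
    private static final int DELETE_BATCH_SIZE = 100_000;

    public static void main(String[] args) throws SQLException
    {
        // Hypothetical connection details, for illustration only.
        try (Connection connection = DriverManager.getConnection(
                "jdbc:postgresql://localhost/alfresco", "alfresco", "secret"))
        {
            connection.setAutoCommit(false);

            List<Long> deleteIds = new ArrayList<>();
            try (PreparedStatement deleteStmt = connection.prepareStatement(
                    "DELETE FROM alf_prop_value WHERE id = ?"))
            {
                for (long id : findObsoleteIds(connection)) // hypothetical helper
                {
                    deleteIds.add(id);
                    if (deleteIds.size() == DELETE_BATCH_SIZE)
                    {
                        deleteBatch(deleteStmt, deleteIds);
                        // Commit as soon as a full delete batch has been flushed,
                        // mirroring this commit, rather than once per fetch batch.
                        connection.commit();
                    }
                }
                if (!deleteIds.isEmpty())
                {
                    deleteBatch(deleteStmt, deleteIds);
                    connection.commit();
                }
            }
        }
    }

    private static void deleteBatch(PreparedStatement deleteStmt, List<Long> deleteIds)
            throws SQLException
    {
        for (Long id : deleteIds)
        {
            deleteStmt.setLong(1, id);
            deleteStmt.addBatch();
        }
        deleteStmt.executeBatch();
        deleteIds.clear();
    }

    // Hypothetical stand-in for the scan over alf_prop_value; the real executor streams
    // a ResultSet and checks secondary tables before deciding which ids to delete.
    private static List<Long> findObsoleteIds(Connection connection)
    {
        return new ArrayList<>();
    }
}
```

Committing per delete batch keeps the transaction size bounded by `deleteBatchSize` regardless of how many rows are scanned, which matters on tables like alf_prop_value with around 100 million records.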