mirror of
https://github.com/Alfresco/alfresco-community-repo.git
synced 2025-07-24 17:32:48 +00:00
ACS 9256 improve async tests stability (#3191)
* ACS-9256 Improved stability in DynamicallySizedThreadPoolExecutorTest
* ACS-9256 Removed unused unstable test in SpringAwareUserTransactionTest
* ACS-9256 Improved stability in DynamicallySizedThreadPoolExecutorTest
* ACS-9256 Improved stability in ActionServiceImplTest and RuleServiceCoverageTest
* ACS-9256 Improved stability in ActionTrackingServiceImplTest
* ACS-9256 Improved performance in ComparePropertyValueEvaluatorTest
* ACS-9256 Improved performance in LockServiceImplTest
* ACS-9256 Improved stability in LockBehaviourImplTest
* ACS-9256 Improved stability in ContentMetadataExtracterTest
* ACS-9256 Removed unstable and unused tests
* ACS-9256 Improve stability in CachedContentCleanupJobTest
* ACS-9256 Pre-commit fixes
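Editor's note: the common thread in these commits is replacing fixed Thread.sleep(...) pauses with Awaitility polling, visible in the diffs below as the new waitForPoolSizeEquals helper and the MAX_ASYNC_TIMEOUT waits. A minimal standalone sketch of that sleep-to-poll migration, using the real Awaitility API but an illustrative class and values that are not repository code:

import static org.awaitility.Awaitility.await;

import java.time.Duration;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

// Illustrative only: the stabilisation pattern used throughout this PR.
public class AwaitilityPatternSketch
{
    public static void main(String[] args)
    {
        ThreadPoolExecutor exec = (ThreadPoolExecutor) Executors.newFixedThreadPool(2);
        exec.execute(() -> {});
        exec.execute(() -> {});

        // Old, flaky style: Thread.sleep(100); then assert the pool size and
        // hope 100 ms was enough on a loaded CI machine. New style: poll the
        // condition repeatedly and fail only if it never holds within the timeout.
        await().atMost(Duration.ofSeconds(1))
                .until(() -> exec.getPoolSize() == 2);

        exec.shutdownNow();
    }
}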
@@ -1431,26 +1431,6 @@
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java": [
{
"type": "Secret Keyword",
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 112,
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java": [
{
"type": "Secret Keyword",
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 103,
"is_secret": false
}
],
"repository/src/test/java/org/alfresco/repo/management/JmxDumpUtilTest.java": [
{
"type": "Secret Keyword",
@@ -1888,5 +1868,5 @@
}
]
},
"generated_at": "2024-12-19T08:58:42Z"
"generated_at": "2025-02-11T13:28:51Z"
}

@@ -145,6 +145,12 @@
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>${dependency.awaitility.version}</version>
<scope>test</scope>
</dependency>
</dependencies>

<build>
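Editor's note: the test-scoped Awaitility dependency added above is what backs the polling waits in the test diffs that follow; the dependency.awaitility.version property it references is defined in a part of the POM that is not shown in this hunk.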
@@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005-2014 Alfresco Software Limited.
 * Copyright (C) 2005-2025 Alfresco Software Limited.
 *
 * This file is part of Alfresco
 *
@@ -18,6 +18,9 @@
 */
package org.alfresco.util;

import static org.awaitility.Awaitility.await;

import java.time.Duration;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
@@ -26,20 +29,20 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import junit.framework.TestCase;

/**
 * Tests for our instance of {@link java.util.concurrent.ThreadPoolExecutor}
 *
 *
 * @author Nick Burch
 */
public class DynamicallySizedThreadPoolExecutorTest extends TestCase
{

private static Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
private static final Duration MAX_WAIT_TIMEOUT = Duration.ofSeconds(1);
private static final Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
private static final int DEFAULT_KEEP_ALIVE_TIME = 90;

@Override
@@ -48,9 +51,9 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
SleepUntilAllWake.reset();
}

public void testUpToCore() throws Exception
public void testUpToCore()
{
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);

assertEquals(0, exec.getPoolSize());
exec.execute(new SleepUntilAllWake());
@@ -61,15 +64,15 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
assertEquals(4, exec.getPoolSize());
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());

SleepUntilAllWake.wakeAll();
Thread.sleep(100);
waitForPoolSizeEquals(exec, 5);
assertEquals(5, exec.getPoolSize());
}

public void testPastCoreButNotHugeQueue() throws Exception
public void testPastCoreButNotHugeQueue()
{
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);

assertEquals(0, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
@@ -80,7 +83,7 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());

// Need to hit max pool size before it adds more
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
@@ -89,20 +92,20 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());
assertEquals(5, exec.getQueue().size());

exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(5, exec.getPoolSize());
assertEquals(7, exec.getQueue().size());

SleepUntilAllWake.wakeAll();
Thread.sleep(100);
waitForPoolSizeEquals(exec, 5);
assertEquals(5, exec.getPoolSize());
}

public void testToExpandQueue() throws Exception
{
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
DynamicallySizedThreadPoolExecutor exec = createInstance(2, 4, 5);

assertEquals(0, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
@@ -110,166 +113,37 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());

exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());

// Next should add one
exec.execute(new SleepUntilAllWake());
Thread.sleep(20); // Let the new thread spin up
waitForPoolSizeEquals(exec, 3); // Let the new thread spin up
assertEquals(3, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());

// And again
exec.execute(new SleepUntilAllWake());
Thread.sleep(20); // Let the new thread spin up
waitForPoolSizeEquals(exec, 4); // Let the new thread spin up
assertEquals(4, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());

// But no more will be added, as we're at max
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(4, exec.getPoolSize());
assertEquals(6, exec.getQueue().size());

SleepUntilAllWake.wakeAll();
Thread.sleep(100);

// All threads still running, as 1 second timeout
assertEquals(4, exec.getPoolSize());
}

public void offTestToExpandThenContract() throws Exception
{
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
exec.setKeepAliveTime(30, TimeUnit.MILLISECONDS);

assertEquals(0, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());

exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());

// Next should add one
exec.execute(new SleepUntilAllWake());
Thread.sleep(20); // Let the new thread spin up
assertEquals(3, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());
SleepUntilAllWake.wakeAll();
Thread.sleep(100);

// And again
exec.execute(new SleepUntilAllWake());
Thread.sleep(20); // Let the new thread spin up
// All threads still running, as 5 second timeout
assertEquals(4, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());

// But no more will be added, as we're at max
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(4, exec.getPoolSize());
assertEquals(6, exec.getQueue().size());

SleepUntilAllWake.wakeAll();
Thread.sleep(100);

// Wait longer than the timeout without any work, which should
// let all the extra threads go away
// (Depending on how closely your JVM follows the specification,
// we may fall back to the core size which is correct, or we
// may go to zero which is wrong, but hey, it's the JVM...)
logger.debug("Core pool size is " + exec.getCorePoolSize());
logger.debug("Current pool size is " + exec.getPoolSize());
logger.debug("Queue size is " + exec.getQueue().size());
assertTrue(
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() >= 0
);
assertTrue(
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() <= 2
);

SleepUntilAllWake.reset();

// Add 2 new jobs, will stay/ go to at 2 threads
assertEquals(0, exec.getQueue().size());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());

// Let the idle threads grab them, then check
Thread.sleep(20);
assertEquals(2, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());

// 3 more, still at 2 threads
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
assertEquals(2, exec.getPoolSize());
assertEquals(3, exec.getQueue().size());

// And again wait for it all
SleepUntilAllWake.wakeAll();
Thread.sleep(100);
assertEquals(2, exec.getPoolSize());

// Now decrease the overall pool size
// Will rise and fall to there now
exec.setCorePoolSize(1);

// Run a quick job, to ensure that the
// "can I kill one yet" logic is applied
SleepUntilAllWake.reset();
exec.execute(new SleepUntilAllWake());
SleepUntilAllWake.wakeAll();

Thread.sleep(100);
assertEquals(1, exec.getPoolSize());
assertEquals(0, exec.getQueue().size());

SleepUntilAllWake.reset();

// Push enough on to go up to 4 active threads
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());
exec.execute(new SleepUntilAllWake());

Thread.sleep(20); // Let the new threads spin up
assertEquals(4, exec.getPoolSize());
assertEquals(6, exec.getQueue().size());

// Wait for them all to finish, should drop back to 1 now
// (Or zero, if your JVM can't read the specification...)
SleepUntilAllWake.wakeAll();
Thread.sleep(100);
assertTrue(
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() >= 0
);
assertTrue(
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
exec.getPoolSize() <= 1
);
}

private DynamicallySizedThreadPoolExecutor createInstance(int corePoolSize, int maximumPoolSize, int keepAliveTime)
@@ -291,6 +165,11 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
new ThreadPoolExecutor.CallerRunsPolicy());
}

private void waitForPoolSizeEquals(DynamicallySizedThreadPoolExecutor exec, int expectedSize)
{
await().atMost(MAX_WAIT_TIMEOUT).until(() -> exec.getPoolSize() == expectedSize);
}

public static class SleepUntilAllWake implements Runnable
{
private static ConcurrentMap<String, Thread> sleeping = new ConcurrentHashMap<String, Thread>();
@@ -299,31 +178,33 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
@Override
public void run()
{
if(allAwake) return;

if (allAwake)
return;

// Track us, and wait for the bang
logger.debug("Adding thread: " + Thread.currentThread().getName());
sleeping.put(Thread.currentThread().getName(), Thread.currentThread());
try
{
Thread.sleep(30*1000);
Thread.sleep(30 * 1000);
System.err.println("Warning - Thread finished sleeping without wake!");
}
catch(InterruptedException e)
catch (InterruptedException e)
{
logger.debug("Interrupted thread: " + Thread.currentThread().getName());
}
}

public static void wakeAll()
{
allAwake = true;
for(Entry<String, Thread> t : sleeping.entrySet())
for (Entry<String, Thread> t : sleeping.entrySet())
{
logger.debug("Interrupting thread: " + t.getKey());
t.getValue().interrupt();
}
}

public static void reset()
{
logger.debug("Resetting.");
@@ -1,5 +1,5 @@
/*
 * Copyright (C) 2005-2023 Alfresco Software Limited.
 * Copyright (C) 2005-2025 Alfresco Software Limited.
 *
 * This file is part of Alfresco
 *
@@ -20,13 +20,11 @@ package org.alfresco.util.transaction;

import java.util.NoSuchElementException;
import java.util.Objects;

import jakarta.transaction.RollbackException;
import jakarta.transaction.Status;
import jakarta.transaction.UserTransaction;

import junit.framework.TestCase;

import org.springframework.transaction.CannotCreateTransactionException;
import org.springframework.transaction.NoTransactionException;
import org.springframework.transaction.TransactionDefinition;
@@ -35,21 +33,20 @@ import org.springframework.transaction.support.AbstractPlatformTransactionManage
import org.springframework.transaction.support.DefaultTransactionStatus;

/**
 * @see org.alfresco.util.transaction.SpringAwareUserTransaction
 *
 * @author Derek Hulley
 * @see org.alfresco.util.transaction.SpringAwareUserTransaction
 */
public class SpringAwareUserTransactionTest extends TestCase
{
private DummyTransactionManager transactionManager;
private FailingTransactionManager failingTransactionManager;
private UserTransaction txn;

public SpringAwareUserTransactionTest()
{
super();
}

@Override
protected void setUp() throws Exception
{
@@ -57,7 +54,7 @@ public class SpringAwareUserTransactionTest extends TestCase
failingTransactionManager = new FailingTransactionManager();
txn = getTxn();
}

private UserTransaction getTxn()
{
return new SpringAwareUserTransaction(
@@ -67,13 +64,13 @@ public class SpringAwareUserTransactionTest extends TestCase
TransactionDefinition.PROPAGATION_REQUIRED,
TransactionDefinition.TIMEOUT_DEFAULT);
}

public void testSetUp() throws Exception
{
assertNotNull(transactionManager);
assertNotNull(txn);
}

private void checkNoStatusOnThread()
{
try
@@ -86,7 +83,7 @@ public class SpringAwareUserTransactionTest extends TestCase
// expected
}
}

public void testNoTxnStatus() throws Exception
{
checkNoStatusOnThread();
@@ -134,7 +131,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}

public void testSimpleTxnWithRollback() throws Exception
{
testNoTxnStatus();
@@ -156,7 +153,7 @@ public class SpringAwareUserTransactionTest extends TestCase
transactionManager.getStatus());
checkNoStatusOnThread();
}

public void testNoBeginCommit() throws Exception
{
testNoTxnStatus();
@@ -171,7 +168,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}

public void testPostRollbackCommitDetection() throws Exception
{
testNoTxnStatus();
@@ -189,7 +186,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}

public void testPostSetRollbackOnlyCommitDetection() throws Exception
{
testNoTxnStatus();
@@ -208,7 +205,7 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}

public void testMismatchedBeginCommit() throws Exception
{
UserTransaction txn1 = getTxn();
@@ -218,18 +215,18 @@ public class SpringAwareUserTransactionTest extends TestCase

txn1.begin();
txn2.begin();

txn2.commit();
txn1.commit();

checkNoStatusOnThread();

txn1 = getTxn();
txn2 = getTxn();

txn1.begin();
txn2.begin();

try
{
txn1.commit();
@@ -245,58 +242,6 @@ public class SpringAwareUserTransactionTest extends TestCase
checkNoStatusOnThread();
}

/**
 * Test for leaked transactions (no guarantee it will succeed due to reliance
 * on garbage collector), so disabled by default.
 *
 * Also, if it succeeds, transaction call stack tracing will be enabled
 * potentially hitting the performance of all subsequent tests.
 *
 * @throws Exception
 */
public void xtestLeakedTransactionLogging() throws Exception
{
assertFalse(SpringAwareUserTransaction.isCallStackTraced());

TrxThread t1 = new TrxThread();
t1.start();
System.gc();
Thread.sleep(1000);

TrxThread t2 = new TrxThread();
t2.start();
System.gc();
Thread.sleep(1000);

assertTrue(SpringAwareUserTransaction.isCallStackTraced());

TrxThread t3 = new TrxThread();
t3.start();
System.gc();
Thread.sleep(3000);
System.gc();
Thread.sleep(3000);
}

private class TrxThread extends Thread
{
public void run()
{
try
{
getTrx();
}
catch (Exception e) {}
}

public void getTrx() throws Exception
{
UserTransaction txn = getTxn();
txn.begin();
txn = null;
}
}

public void testConnectionPoolException() throws Exception
{
testNoTxnStatus();
@@ -311,7 +256,7 @@ public class SpringAwareUserTransactionTest extends TestCase
// Expected fail
}
}

private UserTransaction getFailingTxn()
{
return new SpringAwareUserTransaction(
@@ -321,7 +266,7 @@ public class SpringAwareUserTransactionTest extends TestCase
TransactionDefinition.PROPAGATION_REQUIRED,
TransactionDefinition.TIMEOUT_DEFAULT);
}

public void testTransactionListenerOrder() throws Throwable
{
testNoTxnStatus();
@@ -360,12 +305,12 @@ public class SpringAwareUserTransactionTest extends TestCase
}
checkNoStatusOnThread();
}

private static class TestTransactionListener extends TransactionListenerAdapter
{
private final String name;
private final StringBuffer buffer;

public TestTransactionListener(String name, StringBuffer buffer)
{
Objects.requireNonNull(name);
@@ -373,18 +318,18 @@ public class SpringAwareUserTransactionTest extends TestCase
this.name = name;
this.buffer = buffer;
}

@Override
public void beforeCommit(boolean readOnly)
{
buffer.append(name);
}

public String getName()
{
return name;
}

@Override
public boolean equals(Object obj)
{
@@ -394,17 +339,17 @@ public class SpringAwareUserTransactionTest extends TestCase
}
return false;
}

@Override
public int hashCode()
{
return name.hashCode();
}
}

/**
 * Used to check that the transaction manager is being called correctly
 *
 *
 * @author Derek Hulley
 */
@SuppressWarnings("serial")
@@ -412,7 +357,7 @@ public class SpringAwareUserTransactionTest extends TestCase
{
private int status = Status.STATUS_NO_TRANSACTION;
private Object txn = new Object();

/**
 * @return Returns one of the {@link Status Status.STATUS_XXX} constants
 */
@@ -441,10 +386,10 @@ public class SpringAwareUserTransactionTest extends TestCase
status = Status.STATUS_ROLLEDBACK;
}
}

/**
 * Throws {@link NoSuchElementException} on begin()
 *
 *
 * @author alex.mukha
 */
private static class FailingTransactionManager extends AbstractPlatformTransactionManager
@@ -452,7 +397,7 @@ public class SpringAwareUserTransactionTest extends TestCase
private static final long serialVersionUID = 1L;
private int status = Status.STATUS_NO_TRANSACTION;
private Object txn = new Object();

/**
 * @return Returns one of the {@link Status Status.STATUS_XXX} constants
 */
4 file diffs suppressed because they are too large
@@ -1,363 +1,354 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2023 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
/*
 * Copyright (C) 2005 Jesper Steen Møller
 *
 * This file is part of Alfresco
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 */
package org.alfresco.repo.action.executer;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionImpl;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.test.context.transaction.TestTransaction;
import org.springframework.transaction.annotation.Transactional;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

/**
 * Test of the ActionExecuter for extracting metadata.
 *
 * @author Jesper Steen Møller
 */
@Category(BaseSpringTestsCategory.class)
@Transactional
public class ContentMetadataExtracterTest extends BaseSpringTest
{
protected static final String QUICK_TITLE = "The quick brown fox jumps over the lazy dog";
protected static final String QUICK_DESCRIPTION = "Pangram, fox, dog, Gym class featuring a brown fox and lazy dog";
protected static final String QUICK_CREATOR = "Nevin Nollop";

private NodeService nodeService;
private ContentService contentService;
private MetadataExtracterRegistry registry;
private TransactionService transactionService;
private StoreRef testStoreRef;
private NodeRef rootNodeRef;
private NodeRef nodeRef;

private ContentMetadataExtracter executer;

private final static String ID = GUID.generate();

@Before
public void before() throws Exception
{
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
this.contentService = (ContentService) this.applicationContext.getBean("contentService");
registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
transactionService = (TransactionService) this.applicationContext.getBean("transactionService");

AuthenticationComponent authenticationComponent = (AuthenticationComponent)applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();

// Create the store and get the root node
this.testStoreRef = this.nodeService.createStore(
StoreRef.PROTOCOL_WORKSPACE,
"Test_" + System.currentTimeMillis());
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);

// Create the node used for tests
this.nodeRef = this.nodeService.createNode(
this.rootNodeRef, ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}testnode"),
ContentModel.TYPE_CONTENT).getChildRef();

// Setup the content from the PDF test data
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
cw.putContent(AbstractContentTransformerTest.loadQuickTestFile("pdf"));

// Get the executer instance
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
}

/**
 * Test execution of the extraction itself
 */
@Test
public void testFromBlanks() throws Exception
{
// Test that the action writes properties when they don't exist or are
// unset

// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.remove(ContentModel.PROP_AUTHOR);
props.put(ContentModel.PROP_TITLE, "");
props.put(ContentModel.PROP_DESCRIPTION, null); // Wonder how this will
// be handled
this.nodeService.setProperties(this.nodeRef, props);

// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();

// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});

Thread.sleep(3000); // Need to wait for the async extract

// Check that the properties have been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
assertEquals(QUICK_TITLE, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
assertEquals(QUICK_CREATOR, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
return null;
}
});
}

private static final QName PROP_UNKNOWN_1 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown1");
private static final QName PROP_UNKNOWN_2 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown2");
private static class TestUnknownMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestUnknownMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("unknown1", PROP_UNKNOWN_1.toString());
mappingProperties.put("unknown2", PROP_UNKNOWN_2.toString());
setMappingProperties(mappingProperties);
}
@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}
@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}

public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
rawMap.put("unknown1", Integer.valueOf(1));
rawMap.put("unknown2", "TWO");
return rawMap;
}
}

@Test
public void testUnknownProperties()
{
TestUnknownMetadataExtracter extracterUnknown = new TestUnknownMetadataExtracter();
extracterUnknown.setRegistry(registry);
extracterUnknown.register();
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());

ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);

// The unkown properties should be present
Serializable prop1 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_1);
Serializable prop2 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_2);

assertNotNull("Unknown property is null", prop1);
assertNotNull("Unknown property is null", prop2);
}

private static class TestNullPropMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestNullPropMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("title", ContentModel.PROP_TITLE.toString());
mappingProperties.put("description", ContentModel.PROP_DESCRIPTION.toString());
setMappingProperties(mappingProperties);
}
@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}
@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}

public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
putRawValue("title", null, rawMap);
putRawValue("description", "", rawMap);
return rawMap;
}
}

/**
 * Ensure that missing raw values result in node properties being removed
 * when running with {@link ContentMetadataExtracter#setCarryAspectProperties(boolean)}
 * set to <tt>false</tt>.
 */
@Test
public void testNullExtractedValues_ALF1823()
{
TestNullPropMetadataExtracter extractor = new TestNullPropMetadataExtracter();
extractor.setRegistry(registry);
extractor.register();
// Now set the title and description
nodeService.setProperty(nodeRef, ContentModel.PROP_TITLE, "TITLE");
nodeService.setProperty(nodeRef, ContentModel.PROP_DESCRIPTION, "DESCRIPTION");
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());

ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);

// cm:titled properties should be present
Serializable title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
Serializable descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);

assertNotNull("cm:title property is null", title);
assertNotNull("cm:description property is null", descr);

try
{
// Now change the setting to remove unset aspect properties
executer.setCarryAspectProperties(false);
// Extract again
executer.execute(action, this.nodeRef);

// cm:titled properties should *NOT* be present
title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);

assertNull("cm:title property is not null", title);
assertNull("cm:description property is not null", descr);
}
finally
{
executer.setCarryAspectProperties(true);
}
}

/**
 * Test execution of the pragmatic approach
 */
@Test
public void testFromPartial() throws Exception
{
// Test that the action does not overwrite properties that are already
// set
String myCreator = "Null-op";
String myTitle = "The hot dog is eaten by the city fox";

// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.put(ContentModel.PROP_AUTHOR, myCreator);
props.put(ContentModel.PROP_TITLE, myTitle);
props.remove(ContentModel.PROP_DESCRIPTION); // Allow this baby
this.nodeService.setProperties(this.nodeRef, props);

// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();

// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});

Thread.sleep(3000); // Need to wait for the async extract

// Check that the properties have been preserved, but that description has been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
public Void execute() throws Throwable
{
assertEquals(myTitle, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(myCreator, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));

assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
return null;
}
});
}
}
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.action.executer;

import static org.awaitility.Awaitility.await;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;

import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.test.context.transaction.TestTransaction;
import org.springframework.transaction.annotation.Transactional;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionImpl;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;

/**
 * Test of the ActionExecuter for extracting metadata.
 *
 * @author Jesper Steen Møller
 */
@Category(BaseSpringTestsCategory.class)
@Transactional
public class ContentMetadataExtracterTest extends BaseSpringTest
{
protected static final String QUICK_TITLE = "The quick brown fox jumps over the lazy dog";
protected static final String QUICK_DESCRIPTION = "Pangram, fox, dog, Gym class featuring a brown fox and lazy dog";
protected static final String QUICK_CREATOR = "Nevin Nollop";

private NodeService nodeService;
private ContentService contentService;
private MetadataExtracterRegistry registry;
private TransactionService transactionService;
private StoreRef testStoreRef;
private NodeRef rootNodeRef;
private NodeRef nodeRef;

private ContentMetadataExtracter executer;

private final static String ID = GUID.generate();

@Before
public void before() throws Exception
{
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
this.contentService = (ContentService) this.applicationContext.getBean("contentService");
registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
transactionService = (TransactionService) this.applicationContext.getBean("transactionService");

AuthenticationComponent authenticationComponent = (AuthenticationComponent) applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();

// Create the store and get the root node
this.testStoreRef = this.nodeService.createStore(
StoreRef.PROTOCOL_WORKSPACE,
"Test_" + System.currentTimeMillis());
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);

// Create the node used for tests
this.nodeRef = this.nodeService.createNode(
this.rootNodeRef, ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}testnode"),
ContentModel.TYPE_CONTENT).getChildRef();

// Setup the content from the PDF test data
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
cw.putContent(AbstractContentTransformerTest.loadQuickTestFile("pdf"));

// Get the executer instance
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
}

/**
 * Test execution of the extraction itself
 */
@Test
public void testFromBlanks() throws Exception
{
// Test that the action writes properties when they don't exist or are
// unset

// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.remove(ContentModel.PROP_AUTHOR);
props.put(ContentModel.PROP_TITLE, "");
props.put(ContentModel.PROP_DESCRIPTION, null); // Wonder how this will
// be handled
this.nodeService.setProperties(this.nodeRef, props);

// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();

// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});

// Need to wait for the async extract
await().pollInSameThread()
.atMost(MAX_ASYNC_TIMEOUT)
.until(() -> nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION), Objects::nonNull);

// Check that the properties have been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
assertEquals(QUICK_TITLE, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
assertEquals(QUICK_CREATOR, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
return null;
}
});
}

private static final QName PROP_UNKNOWN_1 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown1");
private static final QName PROP_UNKNOWN_2 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown2");

private static class TestUnknownMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestUnknownMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("unknown1", PROP_UNKNOWN_1.toString());
mappingProperties.put("unknown2", PROP_UNKNOWN_2.toString());
setMappingProperties(mappingProperties);
}

@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}

@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}

public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
rawMap.put("unknown1", Integer.valueOf(1));
rawMap.put("unknown2", "TWO");
return rawMap;
}
}

@Test
public void testUnknownProperties()
{
TestUnknownMetadataExtracter extracterUnknown = new TestUnknownMetadataExtracter();
extracterUnknown.setRegistry(registry);
extracterUnknown.register();
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());

ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);

// The unkown properties should be present
Serializable prop1 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_1);
Serializable prop2 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_2);

assertNotNull("Unknown property is null", prop1);
assertNotNull("Unknown property is null", prop2);
}

private static class TestNullPropMetadataExtracter extends AbstractMappingMetadataExtracter
{
public TestNullPropMetadataExtracter()
{
Properties mappingProperties = new Properties();
mappingProperties.put("title", ContentModel.PROP_TITLE.toString());
mappingProperties.put("description", ContentModel.PROP_DESCRIPTION.toString());
setMappingProperties(mappingProperties);
}

@Override
protected Map<String, Set<QName>> getDefaultMapping()
{
// No need to give anything back as we have explicitly set the mapping already
return new HashMap<String, Set<QName>>(0);
}

@Override
public boolean isSupported(String sourceMimetype)
{
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
}

public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
putRawValue("title", null, rawMap);
putRawValue("description", "", rawMap);
return rawMap;
}
}

/**
 * Ensure that missing raw values result in node properties being removed when running with {@link ContentMetadataExtracter#setCarryAspectProperties(boolean)} set to <tt>false</tt>.
 */
@Test
public void testNullExtractedValues_ALF1823()
{
TestNullPropMetadataExtracter extractor = new TestNullPropMetadataExtracter();
extractor.setRegistry(registry);
extractor.register();
// Now set the title and description
nodeService.setProperty(nodeRef, ContentModel.PROP_TITLE, "TITLE");
nodeService.setProperty(nodeRef, ContentModel.PROP_DESCRIPTION, "DESCRIPTION");
// Now add some content with a binary mimetype
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
cw.putContent("Content for " + getName());

ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, this.nodeRef);

// cm:titled properties should be present
Serializable title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
Serializable descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);

assertNotNull("cm:title property is null", title);
assertNotNull("cm:description property is null", descr);

try
{
// Now change the setting to remove unset aspect properties
executer.setCarryAspectProperties(false);
// Extract again
executer.execute(action, this.nodeRef);

// cm:titled properties should *NOT* be present
title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);

assertNull("cm:title property is not null", title);
assertNull("cm:description property is not null", descr);
}
finally
{
executer.setCarryAspectProperties(true);
}
}

/**
 * Test execution of the pragmatic approach
 */
@Test
public void testFromPartial() throws Exception
{
// Test that the action does not overwrite properties that are already
// set
String myCreator = "Null-op";
String myTitle = "The hot dog is eaten by the city fox";

// Get the old props
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
props.put(ContentModel.PROP_AUTHOR, myCreator);
props.put(ContentModel.PROP_TITLE, myTitle);
props.remove(ContentModel.PROP_DESCRIPTION); // Allow this baby
this.nodeService.setProperties(this.nodeRef, props);

// Make the nodeRef visible to other transactions as it will need to be in async requests
TestTransaction.flagForCommit();
TestTransaction.end();

// Execute the action
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
executer.execute(action, nodeRef);
return null;
}
});

// Need to wait for the async extract
await().pollInSameThread()
.atMost(MAX_ASYNC_TIMEOUT)
.until(() -> nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION), Objects::nonNull);

// Check that the properties have been preserved, but that description has been set
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable
{
assertEquals(myTitle, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
assertEquals(myCreator, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));

assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
return null;
}
});
}
}
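Editor's note: the new version of ContentMetadataExtracterTest above waits with await().pollInSameThread().atMost(MAX_ASYNC_TIMEOUT).until(supplier, predicate) instead of Thread.sleep(3000); the MAX_ASYNC_TIMEOUT constant is defined outside the visible hunk. A minimal standalone sketch of that call shape, with illustrative names that are not repository code:

import static org.awaitility.Awaitility.await;

import java.time.Duration;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;

// Illustrative only: the two-argument until(supplier, predicate) form, plus
// pollInSameThread(), which evaluates the condition on the calling thread
// (useful when the check depends on thread-bound state such as a transaction).
public class AwaitilityUntilSketch
{
    public static void main(String[] args)
    {
        AtomicReference<String> asyncResult = new AtomicReference<>();
        new Thread(() -> asyncResult.set("extracted")).start();

        await().pollInSameThread()
                .atMost(Duration.ofSeconds(5))
                .until(asyncResult::get, Objects::nonNull);

        System.out.println("Async work finished: " + asyncResult.get());
    }
}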
@@ -1,478 +1,475 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.content.caching.cleanup;

import static org.junit.Assert.*;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Calendar;
import java.util.GregorianCalendar;

import org.alfresco.repo.content.caching.CacheFileProps;
import org.alfresco.repo.content.caching.CachingContentStore;
import org.alfresco.repo.content.caching.ContentCacheImpl;
import org.alfresco.repo.content.caching.Key;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
import org.alfresco.util.testing.category.LuceneTests;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.context.ApplicationContext;

/**
 * Tests for the CachedContentCleanupJob
 *
 * @author Matt Ward
 */
@Category(LuceneTests.class)
public class CachedContentCleanupJobTest
{
    private enum UrlSource { PROPS_FILE, REVERSE_CACHE_LOOKUP, NOT_PRESENT }

    private static ApplicationContext ctx;
    private CachingContentStore cachingStore;
    private ContentCacheImpl cache;
    private File cacheRoot;
    private CachedContentCleaner cleaner;

    @BeforeClass
    public static void beforeClass()
    {
        String cleanerConf = "classpath:cachingstore/test-cleaner-context.xml";
        ctx = ApplicationContextHelper.getApplicationContext(new String[] { cleanerConf });
    }

    @Before
    public void setUp() throws IOException
    {
        cachingStore = (CachingContentStore) ctx.getBean("cachingContentStore");
        cache = (ContentCacheImpl) ctx.getBean("contentCache");
        cacheRoot = cache.getCacheRoot();
        cleaner = (CachedContentCleaner) ctx.getBean("cachedContentCleaner");
        cleaner.setMinFileAgeMillis(0);
        cleaner.setMaxDeleteWatchCount(0);

        // Clear the cache from disk and memory
        cache.removeAll();
        FileUtils.cleanDirectory(cacheRoot);
    }

    @Test
    public void filesNotInCacheAreDeleted() throws InterruptedException
    {
        cleaner.setMaxDeleteWatchCount(0);
        int numFiles = 300; // Must be a multiple of the number of UrlSource types being tested
        long totalSize = 0; // what is the total size of the sample files?
        File[] files = new File[numFiles];
        for (int i = 0; i < numFiles; i++)
        {
            // Testing with a number of files. The cached file cleaner will be able to determine the 'original'
            // content URL for each file by either retrieving it from the companion properties file, or performing
            // a 'reverse lookup' in the cache (i.e. cache.contains(Key.forCacheFile(...))), or there will be no
            // URL determinable for the file.
            UrlSource urlSource = UrlSource.values()[i % UrlSource.values().length];
            File cacheFile = createCacheFile(urlSource, false);
            files[i] = cacheFile;
            totalSize += cacheFile.length();
        }

        // Run cleaner
        cleaner.execute();

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        // check all files deleted
        for (File file : files)
        {
            assertFalse("File should have been deleted: " + file, file.exists());
        }

        assertEquals("Incorrect number of deleted files", numFiles, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", totalSize, cleaner.getSizeFilesDeleted());
    }

    @Test
    public void filesNewerThanMinFileAgeMillisAreNotDeleted() throws InterruptedException
    {
        final long minFileAge = 5000;
        cleaner.setMinFileAgeMillis(minFileAge);
        cleaner.setMaxDeleteWatchCount(0);
        int numFiles = 10;

        File[] oldFiles = new File[numFiles];
        for (int i = 0; i < numFiles; i++)
        {
            oldFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
        }

        // Sleep to make sure the 'old' files really are older than minFileAgeMillis
        Thread.sleep(minFileAge);

        File[] newFiles = new File[numFiles];
        long newFilesTotalSize = 0;
        for (int i = 0; i < numFiles; i++)
        {
            newFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
            newFilesTotalSize += newFiles[i].length();
        }

        // The cleaner must finish before any of the newFiles are older than minFileAge. If the files are too
        // old the test will fail and it will be necessary to rethink how to test this.
        cleaner.execute();

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        if (cleaner.getDurationMillis() > minFileAge)
        {
            fail("Test unable to complete, since cleaner took " + cleaner.getDurationMillis() + "ms" +
                    " which is longer than minFileAge [" + minFileAge + "ms]");
        }

        // check all 'old' files deleted
        for (File file : oldFiles)
        {
            assertFalse("File should have been deleted: " + file, file.exists());
        }
        // check all 'new' files still present
        for (File file : newFiles)
        {
            assertTrue("File should not have been deleted: " + file, file.exists());
        }

        // Old and new arrays are the same length and all sample files have identical content,
        // so the new files' totals stand in for the deleted (old) files' totals.
        assertEquals("Incorrect number of deleted files", newFiles.length, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", newFilesTotalSize, cleaner.getSizeFilesDeleted());
    }

    @Test
    public void aggressiveCleanReclaimsTargetSpace() throws InterruptedException
    {
        int numFiles = 30;
        File[] files = new File[numFiles];
        for (int i = 0; i < numFiles; i++)
        {
            // Make sure it's in the cache - all the files will be in the cache, so the
            // cleaner won't clean any up once it has finished aggressively reclaiming space.
            files[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, true);
        }

        // How much space to reclaim - seven files' worth (all files are the same size)
        long fileSize = files[0].length();
        long sevenFilesSize = 7 * fileSize;

        // We'll get it to clean seven files' worth aggressively and then continue non-aggressively.
        // It will delete the oldest files aggressively, even though they are in the cache; since every
        // file here is in the cache, nothing further is deleted once the target space has been reclaimed.
        cleaner.executeAggressive("aggressiveCleanReclaimsTargetSpace()", sevenFilesSize);

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        int numDeleted = 0;

        for (File f : files)
        {
            if (!f.exists())
            {
                numDeleted++;
            }
        }
        // How many were definitely deleted?
        assertEquals("Wrong number of files deleted", 7, numDeleted);

        // The cleaner should have recorded the correct number of deletions
        assertEquals("Incorrect number of deleted files", 7, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", sevenFilesSize, cleaner.getSizeFilesDeleted());
    }

    @Test
    public void standardCleanAfterAggressiveFinished() throws InterruptedException
    {
        // Don't use numFiles > 59, as we're using this for the minute element in the cache file path.
        final int numFiles = 30;
        File[] files = new File[numFiles];

        for (int i = 0; i < numFiles; i++)
        {
            Calendar calendar = new GregorianCalendar(2010, 11, 2, 17, i);

            if (i >= 21 && i <= 24)
            {
                // 21 to 24 will be deleted after the aggressive deletions (once the cleaner has returned
                // to normal cleaning), because they are not in the cache.
                files[i] = createCacheFile(calendar, UrlSource.NOT_PRESENT, false);
            }
            else
            {
                // All other files will be in the cache
                files[i] = createCacheFile(calendar, UrlSource.REVERSE_CACHE_LOOKUP, true);
            }
        }

        // How much space to reclaim - seven files' worth (all files are the same size)
        long fileSize = files[0].length();
        long sevenFilesSize = 7 * fileSize;

        // We'll get it to clean seven files' worth aggressively and then it will continue non-aggressively.
        // It will delete the older files aggressively (i.e. even if they are actively in the cache) and
        // then will examine the newer files for potential deletion.
        // Since some of the newer files are not in the cache, it will delete those too.
        cleaner.executeAggressive("standardCleanAfterAggressiveFinished()", sevenFilesSize);

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        for (int i = 0; i < numFiles; i++)
        {
            if (i < 7)
            {
                assertFalse("First 7 files should have been aggressively cleaned", files[i].exists());
            }

            if (i >= 21 && i <= 24)
            {
                assertFalse("Files with indexes 21-24 should have been deleted", files[i].exists());
            }
        }
        assertEquals("Incorrect number of deleted files", 11, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", (11 * fileSize), cleaner.getSizeFilesDeleted());
    }

    @Test
    public void emptyParentDirectoriesAreDeleted() throws FileNotFoundException
    {
        cleaner.setMaxDeleteWatchCount(0);
        File file = new File(cacheRoot, "243235984/a/b/c/d.bin");
        file.getParentFile().mkdirs();
        PrintWriter writer = new PrintWriter(file);
        writer.println("Content for emptyParentDirectoriesAreDeleted");
        writer.close();
        assertTrue("Directory should exist", new File(cacheRoot, "243235984/a/b/c").exists());

        cleaner.handle(file);

        assertFalse("Directory should have been deleted", new File(cacheRoot, "243235984").exists());
    }

    @Test
    public void markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner()
    {
        // A maxDeleteWatchCount of zero is not advisable in production but is useful for testing:
        // it should result in immediate deletion upon discovery of content no longer in the cache.
        cleaner.setMaxDeleteWatchCount(0);
        File file = createCacheFile(UrlSource.NOT_PRESENT, false);

        cleaner.handle(file);
        checkFilesDeleted(file);

        // Anticipated to be the most common setting: maxDeleteWatchCount of 1.
        cleaner.setMaxDeleteWatchCount(1);
        file = createCacheFile(UrlSource.NOT_PRESENT, false);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 1);

        cleaner.handle(file);
        checkFilesDeleted(file);

        // Check that some other arbitrary figure for maxDeleteWatchCount works correctly.
        cleaner.setMaxDeleteWatchCount(3);
        file = createCacheFile(UrlSource.NOT_PRESENT, false);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 1);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 2);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 3);

        cleaner.handle(file);
        checkFilesDeleted(file);
    }

    private void checkFilesDeleted(File file)
    {
        assertFalse("File should have been deleted: " + file, file.exists());
        CacheFileProps props = new CacheFileProps(file);
        assertFalse("Properties file should have been deleted, cache file: " + file, props.exists());
    }

    private void checkWatchCountForCacheFile(File file, Integer expectedWatchCount)
    {
        assertTrue("File should still exist: " + file, file.exists());
        CacheFileProps props = new CacheFileProps(file);
        props.load();
        assertEquals("File should contain correct deleteWatchCount", expectedWatchCount, props.getDeleteWatchCount());
    }

    @Test
    public void filesInCacheAreNotDeleted() throws InterruptedException
    {
        cleaner.setMaxDeleteWatchCount(0);

        // The SlowContentStore will always give out content when asked,
        // so asking for any content will cause something to be cached.
        String url = makeContentUrl();
        int numFiles = 50;
        for (int i = 0; i < numFiles; i++)
        {
            ContentReader reader = cachingStore.getReader(url);
            reader.getContentString();
        }

        cleaner.execute();

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        for (int i = 0; i < numFiles; i++)
        {
            File cacheFile = new File(cache.getCacheFilePath(url));
            assertTrue("File should exist", cacheFile.exists());
        }
    }

    private File createCacheFile(UrlSource urlSource, boolean putInCache)
    {
        Calendar calendar = new GregorianCalendar();
        return createCacheFile(calendar, urlSource, putInCache);
    }

    private File createCacheFile(Calendar calendar, UrlSource urlSource, boolean putInCache)
    {
        File file = new File(cacheRoot, createNewCacheFilePath(calendar));
        file.getParentFile().mkdirs();
        writeSampleContent(file);
        String contentUrl = makeContentUrl();

        if (putInCache)
        {
            cache.putIntoLookup(Key.forUrl(contentUrl), file.getAbsolutePath());
        }

        switch (urlSource)
        {
        case NOT_PRESENT:
            // cache won't be able to determine the original content URL for the file
            break;
        case PROPS_FILE:
            // file with content URL in its companion properties file
            CacheFileProps props = new CacheFileProps(file);
            props.setContentUrl(contentUrl);
            props.store();
            break;
        case REVERSE_CACHE_LOOKUP:
            // file with content URL in the reverse lookup cache - but not 'in the cache' (forward lookup).
            cache.putIntoLookup(Key.forCacheFile(file), contentUrl);
        }
        assertTrue("File should exist", file.exists());
        return file;
    }

    /**
     * Mimic the functionality of ContentCacheImpl.createNewCacheFilePath()
     * but allowing a specific date (rather than 'now') to be used.
     *
     * @param calendar Calendar
     * @return Path to use for cache file.
     */
    private String createNewCacheFilePath(Calendar calendar)
    {
        int year = calendar.get(Calendar.YEAR);
        int month = calendar.get(Calendar.MONTH) + 1; // 0-based
        int day = calendar.get(Calendar.DAY_OF_MONTH);
        int hour = calendar.get(Calendar.HOUR_OF_DAY);
        int minute = calendar.get(Calendar.MINUTE);
        // create the URL
        StringBuilder sb = new StringBuilder(20);
        sb.append(year).append('/')
                .append(month).append('/')
                .append(day).append('/')
                .append(hour).append('/')
                .append(minute).append('/')
                .append(GUID.generate()).append(".bin");
        return sb.toString();
    }

    private String makeContentUrl()
    {
        return "protocol://some/made/up/url/" + GUID.generate();
    }

    private void writeSampleContent(File file)
    {
        try
        {
            PrintWriter writer = new PrintWriter(file);
            writer.println("Content for sample file in " + getClass().getName());
            writer.close();
        }
        catch (Throwable e)
        {
            throw new RuntimeException("Couldn't write file: " + file, e);
        }
    }
}
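The revised file below is where the commit lands for this test class: the first two tests swap the fixed 400 ms warm-up sleep and the hand-rolled 200 ms polling loop for a bounded Awaitility wait. As a side-by-side sketch of the two idioms (assuming only that the cleaner exposes isRunning(), as the tests above do):

import static org.awaitility.Awaitility.await;

import java.time.Duration;

import org.alfresco.repo.content.caching.cleanup.CachedContentCleaner;

class CleanerWaitIdioms
{
    // Before: fixed warm-up sleep plus a hand-rolled polling loop. There is no
    // upper bound, so a cleaner that never finishes hangs the test forever.
    static void waitWithSleep(CachedContentCleaner cleaner) throws InterruptedException
    {
        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }
    }

    // After: Awaitility polls the same condition but enforces an upper bound and
    // fails fast with a ConditionTimeoutException instead of hanging.
    static void waitWithAwaitility(CachedContentCleaner cleaner)
    {
        await().pollDelay(Duration.ofMillis(100))
                .atMost(Duration.ofSeconds(10))
                .until(() -> !cleaner.isRunning());
    }
}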
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.content.caching.cleanup;

import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.time.Duration;
import java.util.Calendar;
import java.util.GregorianCalendar;

import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.context.ApplicationContext;

import org.alfresco.repo.content.caching.CacheFileProps;
import org.alfresco.repo.content.caching.CachingContentStore;
import org.alfresco.repo.content.caching.ContentCacheImpl;
import org.alfresco.repo.content.caching.Key;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
import org.alfresco.util.testing.category.LuceneTests;

/**
 * Tests for the CachedContentCleanupJob
 *
 * @author Matt Ward
 */
@Category(LuceneTests.class)
public class CachedContentCleanupJobTest
{
    private static final Duration MAX_WAIT_TIMEOUT = Duration.ofSeconds(10);

    private enum UrlSource
    {
        PROPS_FILE, REVERSE_CACHE_LOOKUP, NOT_PRESENT
    }

    private static ApplicationContext ctx;
    private CachingContentStore cachingStore;
    private ContentCacheImpl cache;
    private File cacheRoot;
    private CachedContentCleaner cleaner;

    @BeforeClass
    public static void beforeClass()
    {
        String cleanerConf = "classpath:cachingstore/test-cleaner-context.xml";
        ctx = ApplicationContextHelper.getApplicationContext(new String[]{cleanerConf});
    }

    @Before
    public void setUp() throws IOException
    {
        cachingStore = (CachingContentStore) ctx.getBean("cachingContentStore");
        cache = (ContentCacheImpl) ctx.getBean("contentCache");
        cacheRoot = cache.getCacheRoot();
        cleaner = (CachedContentCleaner) ctx.getBean("cachedContentCleaner");
        cleaner.setMinFileAgeMillis(0);
        cleaner.setMaxDeleteWatchCount(0);

        // Clear the cache from disk and memory
        cache.removeAll();
        FileUtils.cleanDirectory(cacheRoot);
    }

    @Test
    public void filesNotInCacheAreDeleted() throws InterruptedException
    {
        cleaner.setMaxDeleteWatchCount(0);
        int numFiles = 300; // Must be a multiple of the number of UrlSource types being tested
        long totalSize = 0; // what is the total size of the sample files?
        File[] files = new File[numFiles];
        for (int i = 0; i < numFiles; i++)
        {
            // Testing with a number of files. The cached file cleaner will be able to determine the 'original'
            // content URL for each file by either retrieving it from the companion properties file, or performing
            // a 'reverse lookup' in the cache (i.e. cache.contains(Key.forCacheFile(...))), or there will be no
            // URL determinable for the file.
            UrlSource urlSource = UrlSource.values()[i % UrlSource.values().length];
            File cacheFile = createCacheFile(urlSource, false);
            files[i] = cacheFile;
            totalSize += cacheFile.length();
        }

        // Run cleaner
        cleaner.execute();

        await().pollDelay(Duration.ofMillis(100))
                .atMost(MAX_WAIT_TIMEOUT)
                .until(() -> !cleaner.isRunning());

        // check all files deleted
        for (File file : files)
        {
            assertFalse("File should have been deleted: " + file, file.exists());
        }

        assertEquals("Incorrect number of deleted files", numFiles, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", totalSize, cleaner.getSizeFilesDeleted());
    }

    @Test
    public void filesNewerThanMinFileAgeMillisAreNotDeleted() throws InterruptedException
    {
        final long minFileAge = 5000;
        cleaner.setMinFileAgeMillis(minFileAge);
        cleaner.setMaxDeleteWatchCount(0);
        int numFiles = 10;

        File[] oldFiles = new File[numFiles];
        for (int i = 0; i < numFiles; i++)
        {
            oldFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
        }

        // Sleep to make sure the 'old' files really are older than minFileAgeMillis
        Thread.sleep(minFileAge);

        File[] newFiles = new File[numFiles];
        long newFilesTotalSize = 0;
        for (int i = 0; i < numFiles; i++)
        {
            newFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
            newFilesTotalSize += newFiles[i].length();
        }

        // The cleaner must finish before any of the newFiles are older than minFileAge. If the files are too
        // old the test will fail and it will be necessary to rethink how to test this.
        cleaner.execute();

        await().pollDelay(Duration.ofMillis(100))
                .atMost(MAX_WAIT_TIMEOUT)
                .until(() -> !cleaner.isRunning());

        if (cleaner.getDurationMillis() > minFileAge)
        {
            fail("Test unable to complete, since cleaner took " + cleaner.getDurationMillis() + "ms" +
                    " which is longer than minFileAge [" + minFileAge + "ms]");
        }

        // check all 'old' files deleted
        for (File file : oldFiles)
        {
            assertFalse("File should have been deleted: " + file, file.exists());
        }
        // check all 'new' files still present
        for (File file : newFiles)
        {
            assertTrue("File should not have been deleted: " + file, file.exists());
        }

        // Old and new arrays are the same length and all sample files have identical content,
        // so the new files' totals stand in for the deleted (old) files' totals.
        assertEquals("Incorrect number of deleted files", newFiles.length, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", newFilesTotalSize, cleaner.getSizeFilesDeleted());
    }

    @Test
    public void aggressiveCleanReclaimsTargetSpace() throws InterruptedException
    {
        int numFiles = 30;
        File[] files = new File[numFiles];
        for (int i = 0; i < numFiles; i++)
        {
            // Make sure it's in the cache - all the files will be in the cache, so the
            // cleaner won't clean any up once it has finished aggressively reclaiming space.
            files[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, true);
        }

        // How much space to reclaim - seven files' worth (all files are the same size)
        long fileSize = files[0].length();
        long sevenFilesSize = 7 * fileSize;

        // We'll get it to clean seven files' worth aggressively and then continue non-aggressively.
        // It will delete the oldest files aggressively, even though they are in the cache; since every
        // file here is in the cache, nothing further is deleted once the target space has been reclaimed.
        cleaner.executeAggressive("aggressiveCleanReclaimsTargetSpace()", sevenFilesSize);

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        int numDeleted = 0;

        for (File f : files)
        {
            if (!f.exists())
            {
                numDeleted++;
            }
        }
        // How many were definitely deleted?
        assertEquals("Wrong number of files deleted", 7, numDeleted);

        // The cleaner should have recorded the correct number of deletions
        assertEquals("Incorrect number of deleted files", 7, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", sevenFilesSize, cleaner.getSizeFilesDeleted());
    }

    @Test
    public void standardCleanAfterAggressiveFinished() throws InterruptedException
    {
        // Don't use numFiles > 59, as we're using this for the minute element in the cache file path.
        final int numFiles = 30;
        File[] files = new File[numFiles];

        for (int i = 0; i < numFiles; i++)
        {
            Calendar calendar = new GregorianCalendar(2010, 11, 2, 17, i);

            if (i >= 21 && i <= 24)
            {
                // 21 to 24 will be deleted after the aggressive deletions (once the cleaner has returned
                // to normal cleaning), because they are not in the cache.
                files[i] = createCacheFile(calendar, UrlSource.NOT_PRESENT, false);
            }
            else
            {
                // All other files will be in the cache
                files[i] = createCacheFile(calendar, UrlSource.REVERSE_CACHE_LOOKUP, true);
            }
        }

        // How much space to reclaim - seven files' worth (all files are the same size)
        long fileSize = files[0].length();
        long sevenFilesSize = 7 * fileSize;

        // We'll get it to clean seven files' worth aggressively and then it will continue non-aggressively.
        // It will delete the older files aggressively (i.e. even if they are actively in the cache) and
        // then will examine the newer files for potential deletion.
        // Since some of the newer files are not in the cache, it will delete those too.
        cleaner.executeAggressive("standardCleanAfterAggressiveFinished()", sevenFilesSize);

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        for (int i = 0; i < numFiles; i++)
        {
            if (i < 7)
            {
                assertFalse("First 7 files should have been aggressively cleaned", files[i].exists());
            }

            if (i >= 21 && i <= 24)
            {
                assertFalse("Files with indexes 21-24 should have been deleted", files[i].exists());
            }
        }
        assertEquals("Incorrect number of deleted files", 11, cleaner.getNumFilesDeleted());
        assertEquals("Incorrect total size of files deleted", (11 * fileSize), cleaner.getSizeFilesDeleted());
    }

    @Test
    public void emptyParentDirectoriesAreDeleted() throws FileNotFoundException
    {
        cleaner.setMaxDeleteWatchCount(0);
        File file = new File(cacheRoot, "243235984/a/b/c/d.bin");
        file.getParentFile().mkdirs();
        PrintWriter writer = new PrintWriter(file);
        writer.println("Content for emptyParentDirectoriesAreDeleted");
        writer.close();
        assertTrue("Directory should exist", new File(cacheRoot, "243235984/a/b/c").exists());

        cleaner.handle(file);

        assertFalse("Directory should have been deleted", new File(cacheRoot, "243235984").exists());
    }

    @Test
    public void markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner()
    {
        // A maxDeleteWatchCount of zero is not advisable in production but is useful for testing:
        // it should result in immediate deletion upon discovery of content no longer in the cache.
        cleaner.setMaxDeleteWatchCount(0);
        File file = createCacheFile(UrlSource.NOT_PRESENT, false);

        cleaner.handle(file);
        checkFilesDeleted(file);

        // Anticipated to be the most common setting: maxDeleteWatchCount of 1.
        cleaner.setMaxDeleteWatchCount(1);
        file = createCacheFile(UrlSource.NOT_PRESENT, false);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 1);

        cleaner.handle(file);
        checkFilesDeleted(file);

        // Check that some other arbitrary figure for maxDeleteWatchCount works correctly.
        cleaner.setMaxDeleteWatchCount(3);
        file = createCacheFile(UrlSource.NOT_PRESENT, false);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 1);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 2);

        cleaner.handle(file);
        checkWatchCountForCacheFile(file, 3);

        cleaner.handle(file);
        checkFilesDeleted(file);
    }

    private void checkFilesDeleted(File file)
    {
        assertFalse("File should have been deleted: " + file, file.exists());
        CacheFileProps props = new CacheFileProps(file);
        assertFalse("Properties file should have been deleted, cache file: " + file, props.exists());
    }

    private void checkWatchCountForCacheFile(File file, Integer expectedWatchCount)
    {
        assertTrue("File should still exist: " + file, file.exists());
        CacheFileProps props = new CacheFileProps(file);
        props.load();
        assertEquals("File should contain correct deleteWatchCount", expectedWatchCount, props.getDeleteWatchCount());
    }

    @Test
    public void filesInCacheAreNotDeleted() throws InterruptedException
    {
        cleaner.setMaxDeleteWatchCount(0);

        // The SlowContentStore will always give out content when asked,
        // so asking for any content will cause something to be cached.
        String url = makeContentUrl();
        int numFiles = 50;
        for (int i = 0; i < numFiles; i++)
        {
            ContentReader reader = cachingStore.getReader(url);
            reader.getContentString();
        }

        cleaner.execute();

        Thread.sleep(400);
        while (cleaner.isRunning())
        {
            Thread.sleep(200);
        }

        for (int i = 0; i < numFiles; i++)
        {
            File cacheFile = new File(cache.getCacheFilePath(url));
            assertTrue("File should exist", cacheFile.exists());
        }
    }

    private File createCacheFile(UrlSource urlSource, boolean putInCache)
    {
        Calendar calendar = new GregorianCalendar();
        return createCacheFile(calendar, urlSource, putInCache);
    }

    private File createCacheFile(Calendar calendar, UrlSource urlSource, boolean putInCache)
    {
        File file = new File(cacheRoot, createNewCacheFilePath(calendar));
        file.getParentFile().mkdirs();
        writeSampleContent(file);
        String contentUrl = makeContentUrl();

        if (putInCache)
        {
            cache.putIntoLookup(Key.forUrl(contentUrl), file.getAbsolutePath());
        }

        switch (urlSource)
        {
        case NOT_PRESENT:
            // cache won't be able to determine the original content URL for the file
            break;
        case PROPS_FILE:
            // file with content URL in its companion properties file
            CacheFileProps props = new CacheFileProps(file);
            props.setContentUrl(contentUrl);
            props.store();
            break;
        case REVERSE_CACHE_LOOKUP:
            // file with content URL in the reverse lookup cache - but not 'in the cache' (forward lookup).
            cache.putIntoLookup(Key.forCacheFile(file), contentUrl);
        }
        assertTrue("File should exist", file.exists());
        return file;
    }

    /**
     * Mimic functionality of ContentCacheImpl.createNewCacheFilePath() but allowing a specific date (rather than 'now') to be used.
     *
     * @param calendar
     *            Calendar
     * @return Path to use for cache file.
     */
    private String createNewCacheFilePath(Calendar calendar)
    {
        int year = calendar.get(Calendar.YEAR);
        int month = calendar.get(Calendar.MONTH) + 1; // 0-based
        int day = calendar.get(Calendar.DAY_OF_MONTH);
        int hour = calendar.get(Calendar.HOUR_OF_DAY);
        int minute = calendar.get(Calendar.MINUTE);
        // create the URL
        StringBuilder sb = new StringBuilder(20);
        sb.append(year).append('/')
                .append(month).append('/')
                .append(day).append('/')
                .append(hour).append('/')
                .append(minute).append('/')
                .append(GUID.generate()).append(".bin");
        return sb.toString();
    }

    private String makeContentUrl()
    {
        return "protocol://some/made/up/url/" + GUID.generate();
    }

    private void writeSampleContent(File file)
    {
        try
        {
            PrintWriter writer = new PrintWriter(file);
            writer.println("Content for sample file in " + getClass().getName());
            writer.close();
        }
        catch (Throwable e)
        {
            throw new RuntimeException("Couldn't write file: " + file, e);
        }
    }
}
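One behaviour worth calling out: markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner() pins down the cleaner's deferred-deletion contract. The rule below is a simplified model inferred from those assertions, not code taken from CachedContentCleaner itself:

// Simplified model of the deferred-deletion rule exercised by
// markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner(): a file whose
// content URL cannot be determined is deleted only once it has been seen
// more than maxDeleteWatchCount times (a count of 0 deletes immediately).
class DeleteWatchRule
{
    private final int maxDeleteWatchCount;

    DeleteWatchRule(int maxDeleteWatchCount)
    {
        this.maxDeleteWatchCount = maxDeleteWatchCount;
    }

    /**
     * @param currentWatchCount watch count read from the cache file's properties (0 if none)
     * @return true if the file should be deleted on this pass; otherwise the
     *         caller should persist currentWatchCount + 1 and defer deletion
     */
    boolean shouldDeleteNow(int currentWatchCount)
    {
        return currentWatchCount >= maxDeleteWatchCount;
    }
}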
File diff suppressed because it is too large (three files)
@@ -1,57 +1,57 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.util;

import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.ContextCustomizerFactories;
import org.springframework.test.context.junit4.SpringRunner;

/**
 * Base test class providing Hibernate sessions.
 * <p>
 * By default this is auto-wired by type. If this is going to
 * result in a conflict, then use auto-wire by name. This can be done by
 * setting populateProtectedVariables to true in the constructor and
 * then adding protected members with the same name as the bean you require.
 *
 * @author Derek Hulley
 */
@RunWith(SpringRunner.class)
@ContextConfiguration({"classpath:alfresco/application-context.xml"})
@ContextCustomizerFactories(factories = {}, mergeMode = ContextCustomizerFactories.MergeMode.REPLACE_DEFAULTS)
public abstract class BaseSpringTest extends TestCase
{
    public Log logger = LogFactory.getLog(getClass().getName());

    @Autowired
    protected ApplicationContext applicationContext;
}
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2025 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.util;

import java.time.Duration;

import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.ContextCustomizerFactories;
import org.springframework.test.context.junit4.SpringRunner;

/**
 * Base test class providing Hibernate sessions.
 * <p>
 * By default this is auto-wired by type. If this is going to result in a conflict, then use auto-wire by name. This can be done by setting populateProtectedVariables to true in the constructor and then adding protected members with the same name as the bean you require.
 *
 * @author Derek Hulley
 */
@RunWith(SpringRunner.class)
@ContextConfiguration({"classpath:alfresco/application-context.xml"})
@ContextCustomizerFactories(factories = {}, mergeMode = ContextCustomizerFactories.MergeMode.REPLACE_DEFAULTS)
public abstract class BaseSpringTest extends TestCase
{
    protected static final Duration MAX_ASYNC_TIMEOUT = Duration.ofSeconds(10);

    public Log logger = LogFactory.getLog(getClass().getName());

    @Autowired
    protected ApplicationContext applicationContext;
}
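A usage sketch for the new shared constant: a hypothetical subclass waits on asynchronous repository state with the same bounded-poll idiom used in ContentMetadataExtracterTest above. The class name and fetchAsyncResult() are placeholders, not real APIs, and the Awaitility dependency added in the pom above is assumed to be on the test classpath.

import static org.awaitility.Awaitility.await;

import java.util.Objects;

import org.alfresco.util.BaseSpringTest;

// Hypothetical subclass showing the intended use of MAX_ASYNC_TIMEOUT.
public class ExampleAsyncSpringTest extends BaseSpringTest
{
    public void testWaitsForAsyncResult()
    {
        // Poll until the async job has produced a value, failing after the shared timeout.
        await().pollInSameThread()
                .atMost(MAX_ASYNC_TIMEOUT)
                .until(this::fetchAsyncResult, Objects::nonNull);
    }

    private String fetchAsyncResult()
    {
        return "done"; // stand-in; a real test would read repository state here
    }
}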