mirror of
https://github.com/Alfresco/alfresco-community-repo.git
synced 2025-07-24 17:32:48 +00:00
ACS 9256 improve async tests stability (#3191)
* ACS-9256 Improved stability in DynamicallySizedThreadPoolExecutorTest * ACS-9256 Removed unused unstable test in SpringAwareUserTransactionTest * ACS-9256 Improved stability in DynamicallySizedThreadPoolExecutorTest * ACS-9256 Improved stability in ActionServiceImplTest and RuleServiceCoverageTest * ACS-9256 Improved stability in ActionTrackingServiceImplTest * ACS-9256 Improved performance in ComparePropertyValueEvaluatorTest * ACS-9256 Improved performance in LockServiceImplTest * ACS-9256 Improved stability in LockBehaviourImplTest * ACS-9256 Improved stability in ContentMetadataExtracterTest * ACS-9256 Removed unstable and unused tests * ACS-9256 Improve stability in CachedContentCleanupJobTest * ACS-9256 Pre-commit fixes
This commit is contained in:
@@ -1431,26 +1431,6 @@
|
|||||||
"is_secret": false
|
"is_secret": false
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java": [
|
|
||||||
{
|
|
||||||
"type": "Secret Keyword",
|
|
||||||
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockBehaviourImplTest.java",
|
|
||||||
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
|
|
||||||
"is_verified": false,
|
|
||||||
"line_number": 112,
|
|
||||||
"is_secret": false
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java": [
|
|
||||||
{
|
|
||||||
"type": "Secret Keyword",
|
|
||||||
"filename": "repository/src/test/java/org/alfresco/repo/lock/LockServiceImplTest.java",
|
|
||||||
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
|
|
||||||
"is_verified": false,
|
|
||||||
"line_number": 103,
|
|
||||||
"is_secret": false
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"repository/src/test/java/org/alfresco/repo/management/JmxDumpUtilTest.java": [
|
"repository/src/test/java/org/alfresco/repo/management/JmxDumpUtilTest.java": [
|
||||||
{
|
{
|
||||||
"type": "Secret Keyword",
|
"type": "Secret Keyword",
|
||||||
@@ -1888,5 +1868,5 @@
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"generated_at": "2024-12-19T08:58:42Z"
|
"generated_at": "2025-02-11T13:28:51Z"
|
||||||
}
|
}
|
||||||
|
@@ -145,6 +145,12 @@
|
|||||||
<groupId>org.apache.httpcomponents</groupId>
|
<groupId>org.apache.httpcomponents</groupId>
|
||||||
<artifactId>httpclient</artifactId>
|
<artifactId>httpclient</artifactId>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.awaitility</groupId>
|
||||||
|
<artifactId>awaitility</artifactId>
|
||||||
|
<version>${dependency.awaitility.version}</version>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
</dependencies>
|
</dependencies>
|
||||||
|
|
||||||
<build>
|
<build>
|
||||||
|
@@ -1,5 +1,5 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2005-2014 Alfresco Software Limited.
|
* Copyright (C) 2005-2025 Alfresco Software Limited.
|
||||||
*
|
*
|
||||||
* This file is part of Alfresco
|
* This file is part of Alfresco
|
||||||
*
|
*
|
||||||
@@ -18,6 +18,9 @@
|
|||||||
*/
|
*/
|
||||||
package org.alfresco.util;
|
package org.alfresco.util;
|
||||||
|
|
||||||
|
import static org.awaitility.Awaitility.await;
|
||||||
|
|
||||||
|
import java.time.Duration;
|
||||||
import java.util.Map.Entry;
|
import java.util.Map.Entry;
|
||||||
import java.util.concurrent.BlockingQueue;
|
import java.util.concurrent.BlockingQueue;
|
||||||
import java.util.concurrent.ConcurrentHashMap;
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
@@ -26,20 +29,20 @@ import java.util.concurrent.LinkedBlockingQueue;
|
|||||||
import java.util.concurrent.ThreadPoolExecutor;
|
import java.util.concurrent.ThreadPoolExecutor;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
|
|
||||||
|
import junit.framework.TestCase;
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Tests for our instance of {@link java.util.concurrent.ThreadPoolExecutor}
|
* Tests for our instance of {@link java.util.concurrent.ThreadPoolExecutor}
|
||||||
*
|
*
|
||||||
* @author Nick Burch
|
* @author Nick Burch
|
||||||
*/
|
*/
|
||||||
public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
||||||
{
|
{
|
||||||
|
|
||||||
private static Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
|
private static final Duration MAX_WAIT_TIMEOUT = Duration.ofSeconds(1);
|
||||||
|
private static final Log logger = LogFactory.getLog(DynamicallySizedThreadPoolExecutorTest.class);
|
||||||
private static final int DEFAULT_KEEP_ALIVE_TIME = 90;
|
private static final int DEFAULT_KEEP_ALIVE_TIME = 90;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -48,9 +51,9 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
|||||||
SleepUntilAllWake.reset();
|
SleepUntilAllWake.reset();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testUpToCore() throws Exception
|
public void testUpToCore()
|
||||||
{
|
{
|
||||||
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
|
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);
|
||||||
|
|
||||||
assertEquals(0, exec.getPoolSize());
|
assertEquals(0, exec.getPoolSize());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
@@ -61,15 +64,15 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
|||||||
assertEquals(4, exec.getPoolSize());
|
assertEquals(4, exec.getPoolSize());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
assertEquals(5, exec.getPoolSize());
|
assertEquals(5, exec.getPoolSize());
|
||||||
|
|
||||||
SleepUntilAllWake.wakeAll();
|
SleepUntilAllWake.wakeAll();
|
||||||
Thread.sleep(100);
|
waitForPoolSizeEquals(exec, 5);
|
||||||
assertEquals(5, exec.getPoolSize());
|
assertEquals(5, exec.getPoolSize());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testPastCoreButNotHugeQueue() throws Exception
|
public void testPastCoreButNotHugeQueue()
|
||||||
{
|
{
|
||||||
DynamicallySizedThreadPoolExecutor exec = createInstance(5,10, DEFAULT_KEEP_ALIVE_TIME);
|
DynamicallySizedThreadPoolExecutor exec = createInstance(5, 10, DEFAULT_KEEP_ALIVE_TIME);
|
||||||
|
|
||||||
assertEquals(0, exec.getPoolSize());
|
assertEquals(0, exec.getPoolSize());
|
||||||
assertEquals(0, exec.getQueue().size());
|
assertEquals(0, exec.getQueue().size());
|
||||||
@@ -80,7 +83,7 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
|||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
assertEquals(5, exec.getPoolSize());
|
assertEquals(5, exec.getPoolSize());
|
||||||
assertEquals(0, exec.getQueue().size());
|
assertEquals(0, exec.getQueue().size());
|
||||||
|
|
||||||
// Need to hit max pool size before it adds more
|
// Need to hit max pool size before it adds more
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
@@ -89,20 +92,20 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
|||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
assertEquals(5, exec.getPoolSize());
|
assertEquals(5, exec.getPoolSize());
|
||||||
assertEquals(5, exec.getQueue().size());
|
assertEquals(5, exec.getQueue().size());
|
||||||
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
assertEquals(5, exec.getPoolSize());
|
assertEquals(5, exec.getPoolSize());
|
||||||
assertEquals(7, exec.getQueue().size());
|
assertEquals(7, exec.getQueue().size());
|
||||||
|
|
||||||
SleepUntilAllWake.wakeAll();
|
SleepUntilAllWake.wakeAll();
|
||||||
Thread.sleep(100);
|
waitForPoolSizeEquals(exec, 5);
|
||||||
assertEquals(5, exec.getPoolSize());
|
assertEquals(5, exec.getPoolSize());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testToExpandQueue() throws Exception
|
public void testToExpandQueue() throws Exception
|
||||||
{
|
{
|
||||||
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
|
DynamicallySizedThreadPoolExecutor exec = createInstance(2, 4, 5);
|
||||||
|
|
||||||
assertEquals(0, exec.getPoolSize());
|
assertEquals(0, exec.getPoolSize());
|
||||||
assertEquals(0, exec.getQueue().size());
|
assertEquals(0, exec.getQueue().size());
|
||||||
@@ -110,166 +113,37 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
|||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
assertEquals(2, exec.getPoolSize());
|
assertEquals(2, exec.getPoolSize());
|
||||||
assertEquals(0, exec.getQueue().size());
|
assertEquals(0, exec.getQueue().size());
|
||||||
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
assertEquals(2, exec.getPoolSize());
|
assertEquals(2, exec.getPoolSize());
|
||||||
assertEquals(3, exec.getQueue().size());
|
assertEquals(3, exec.getQueue().size());
|
||||||
|
|
||||||
// Next should add one
|
// Next should add one
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
Thread.sleep(20); // Let the new thread spin up
|
waitForPoolSizeEquals(exec, 3); // Let the new thread spin up
|
||||||
assertEquals(3, exec.getPoolSize());
|
assertEquals(3, exec.getPoolSize());
|
||||||
assertEquals(3, exec.getQueue().size());
|
assertEquals(3, exec.getQueue().size());
|
||||||
|
|
||||||
// And again
|
// And again
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
Thread.sleep(20); // Let the new thread spin up
|
waitForPoolSizeEquals(exec, 4); // Let the new thread spin up
|
||||||
assertEquals(4, exec.getPoolSize());
|
assertEquals(4, exec.getPoolSize());
|
||||||
assertEquals(3, exec.getQueue().size());
|
assertEquals(3, exec.getQueue().size());
|
||||||
|
|
||||||
// But no more will be added, as we're at max
|
// But no more will be added, as we're at max
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
exec.execute(new SleepUntilAllWake());
|
exec.execute(new SleepUntilAllWake());
|
||||||
assertEquals(4, exec.getPoolSize());
|
assertEquals(4, exec.getPoolSize());
|
||||||
assertEquals(6, exec.getQueue().size());
|
assertEquals(6, exec.getQueue().size());
|
||||||
|
|
||||||
SleepUntilAllWake.wakeAll();
|
|
||||||
Thread.sleep(100);
|
|
||||||
|
|
||||||
// All threads still running, as 1 second timeout
|
|
||||||
assertEquals(4, exec.getPoolSize());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void offTestToExpandThenContract() throws Exception
|
SleepUntilAllWake.wakeAll();
|
||||||
{
|
Thread.sleep(100);
|
||||||
DynamicallySizedThreadPoolExecutor exec = createInstance(2,4,1);
|
|
||||||
exec.setKeepAliveTime(30, TimeUnit.MILLISECONDS);
|
|
||||||
|
|
||||||
assertEquals(0, exec.getPoolSize());
|
|
||||||
assertEquals(0, exec.getQueue().size());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
assertEquals(2, exec.getPoolSize());
|
|
||||||
assertEquals(0, exec.getQueue().size());
|
|
||||||
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
assertEquals(2, exec.getPoolSize());
|
|
||||||
assertEquals(3, exec.getQueue().size());
|
|
||||||
|
|
||||||
// Next should add one
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
Thread.sleep(20); // Let the new thread spin up
|
|
||||||
assertEquals(3, exec.getPoolSize());
|
|
||||||
assertEquals(3, exec.getQueue().size());
|
|
||||||
|
|
||||||
// And again
|
// All threads still running, as 5 second timeout
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
Thread.sleep(20); // Let the new thread spin up
|
|
||||||
assertEquals(4, exec.getPoolSize());
|
assertEquals(4, exec.getPoolSize());
|
||||||
assertEquals(3, exec.getQueue().size());
|
|
||||||
|
|
||||||
// But no more will be added, as we're at max
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
assertEquals(4, exec.getPoolSize());
|
|
||||||
assertEquals(6, exec.getQueue().size());
|
|
||||||
|
|
||||||
SleepUntilAllWake.wakeAll();
|
|
||||||
Thread.sleep(100);
|
|
||||||
|
|
||||||
// Wait longer than the timeout without any work, which should
|
|
||||||
// let all the extra threads go away
|
|
||||||
// (Depending on how closely your JVM follows the specification,
|
|
||||||
// we may fall back to the core size which is correct, or we
|
|
||||||
// may go to zero which is wrong, but hey, it's the JVM...)
|
|
||||||
logger.debug("Core pool size is " + exec.getCorePoolSize());
|
|
||||||
logger.debug("Current pool size is " + exec.getPoolSize());
|
|
||||||
logger.debug("Queue size is " + exec.getQueue().size());
|
|
||||||
assertTrue(
|
|
||||||
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
|
|
||||||
exec.getPoolSize() >= 0
|
|
||||||
);
|
|
||||||
assertTrue(
|
|
||||||
"Pool size should be 0-2 as everything is idle, was " + exec.getPoolSize(),
|
|
||||||
exec.getPoolSize() <= 2
|
|
||||||
);
|
|
||||||
|
|
||||||
SleepUntilAllWake.reset();
|
|
||||||
|
|
||||||
// Add 2 new jobs, will stay/ go to at 2 threads
|
|
||||||
assertEquals(0, exec.getQueue().size());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
|
|
||||||
// Let the idle threads grab them, then check
|
|
||||||
Thread.sleep(20);
|
|
||||||
assertEquals(2, exec.getPoolSize());
|
|
||||||
assertEquals(0, exec.getQueue().size());
|
|
||||||
|
|
||||||
// 3 more, still at 2 threads
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
assertEquals(2, exec.getPoolSize());
|
|
||||||
assertEquals(3, exec.getQueue().size());
|
|
||||||
|
|
||||||
// And again wait for it all
|
|
||||||
SleepUntilAllWake.wakeAll();
|
|
||||||
Thread.sleep(100);
|
|
||||||
assertEquals(2, exec.getPoolSize());
|
|
||||||
|
|
||||||
|
|
||||||
// Now decrease the overall pool size
|
|
||||||
// Will rise and fall to there now
|
|
||||||
exec.setCorePoolSize(1);
|
|
||||||
|
|
||||||
// Run a quick job, to ensure that the
|
|
||||||
// "can I kill one yet" logic is applied
|
|
||||||
SleepUntilAllWake.reset();
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
SleepUntilAllWake.wakeAll();
|
|
||||||
|
|
||||||
Thread.sleep(100);
|
|
||||||
assertEquals(1, exec.getPoolSize());
|
|
||||||
assertEquals(0, exec.getQueue().size());
|
|
||||||
|
|
||||||
SleepUntilAllWake.reset();
|
|
||||||
|
|
||||||
|
|
||||||
// Push enough on to go up to 4 active threads
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
exec.execute(new SleepUntilAllWake());
|
|
||||||
|
|
||||||
Thread.sleep(20); // Let the new threads spin up
|
|
||||||
assertEquals(4, exec.getPoolSize());
|
|
||||||
assertEquals(6, exec.getQueue().size());
|
|
||||||
|
|
||||||
// Wait for them all to finish, should drop back to 1 now
|
|
||||||
// (Or zero, if your JVM can't read the specification...)
|
|
||||||
SleepUntilAllWake.wakeAll();
|
|
||||||
Thread.sleep(100);
|
|
||||||
assertTrue(
|
|
||||||
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
|
|
||||||
exec.getPoolSize() >= 0
|
|
||||||
);
|
|
||||||
assertTrue(
|
|
||||||
"Pool size should be 0 or 1 as everything is idle, was " + exec.getPoolSize(),
|
|
||||||
exec.getPoolSize() <= 1
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private DynamicallySizedThreadPoolExecutor createInstance(int corePoolSize, int maximumPoolSize, int keepAliveTime)
|
private DynamicallySizedThreadPoolExecutor createInstance(int corePoolSize, int maximumPoolSize, int keepAliveTime)
|
||||||
@@ -291,6 +165,11 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
|||||||
new ThreadPoolExecutor.CallerRunsPolicy());
|
new ThreadPoolExecutor.CallerRunsPolicy());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void waitForPoolSizeEquals(DynamicallySizedThreadPoolExecutor exec, int expectedSize)
|
||||||
|
{
|
||||||
|
await().atMost(MAX_WAIT_TIMEOUT).until(() -> exec.getPoolSize() == expectedSize);
|
||||||
|
}
|
||||||
|
|
||||||
public static class SleepUntilAllWake implements Runnable
|
public static class SleepUntilAllWake implements Runnable
|
||||||
{
|
{
|
||||||
private static ConcurrentMap<String, Thread> sleeping = new ConcurrentHashMap<String, Thread>();
|
private static ConcurrentMap<String, Thread> sleeping = new ConcurrentHashMap<String, Thread>();
|
||||||
@@ -299,31 +178,33 @@ public class DynamicallySizedThreadPoolExecutorTest extends TestCase
|
|||||||
@Override
|
@Override
|
||||||
public void run()
|
public void run()
|
||||||
{
|
{
|
||||||
if(allAwake) return;
|
if (allAwake)
|
||||||
|
return;
|
||||||
|
|
||||||
// Track us, and wait for the bang
|
// Track us, and wait for the bang
|
||||||
logger.debug("Adding thread: " + Thread.currentThread().getName());
|
logger.debug("Adding thread: " + Thread.currentThread().getName());
|
||||||
sleeping.put(Thread.currentThread().getName(), Thread.currentThread());
|
sleeping.put(Thread.currentThread().getName(), Thread.currentThread());
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
Thread.sleep(30*1000);
|
Thread.sleep(30 * 1000);
|
||||||
System.err.println("Warning - Thread finished sleeping without wake!");
|
System.err.println("Warning - Thread finished sleeping without wake!");
|
||||||
}
|
}
|
||||||
catch(InterruptedException e)
|
catch (InterruptedException e)
|
||||||
{
|
{
|
||||||
logger.debug("Interrupted thread: " + Thread.currentThread().getName());
|
logger.debug("Interrupted thread: " + Thread.currentThread().getName());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void wakeAll()
|
public static void wakeAll()
|
||||||
{
|
{
|
||||||
allAwake = true;
|
allAwake = true;
|
||||||
for(Entry<String, Thread> t : sleeping.entrySet())
|
for (Entry<String, Thread> t : sleeping.entrySet())
|
||||||
{
|
{
|
||||||
logger.debug("Interrupting thread: " + t.getKey());
|
logger.debug("Interrupting thread: " + t.getKey());
|
||||||
t.getValue().interrupt();
|
t.getValue().interrupt();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void reset()
|
public static void reset()
|
||||||
{
|
{
|
||||||
logger.debug("Resetting.");
|
logger.debug("Resetting.");
|
||||||
|
@@ -1,5 +1,5 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2005-2023 Alfresco Software Limited.
|
* Copyright (C) 2005-2025 Alfresco Software Limited.
|
||||||
*
|
*
|
||||||
* This file is part of Alfresco
|
* This file is part of Alfresco
|
||||||
*
|
*
|
||||||
@@ -20,13 +20,11 @@ package org.alfresco.util.transaction;
|
|||||||
|
|
||||||
import java.util.NoSuchElementException;
|
import java.util.NoSuchElementException;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
|
|
||||||
import jakarta.transaction.RollbackException;
|
import jakarta.transaction.RollbackException;
|
||||||
import jakarta.transaction.Status;
|
import jakarta.transaction.Status;
|
||||||
import jakarta.transaction.UserTransaction;
|
import jakarta.transaction.UserTransaction;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
import junit.framework.TestCase;
|
||||||
|
|
||||||
import org.springframework.transaction.CannotCreateTransactionException;
|
import org.springframework.transaction.CannotCreateTransactionException;
|
||||||
import org.springframework.transaction.NoTransactionException;
|
import org.springframework.transaction.NoTransactionException;
|
||||||
import org.springframework.transaction.TransactionDefinition;
|
import org.springframework.transaction.TransactionDefinition;
|
||||||
@@ -35,21 +33,20 @@ import org.springframework.transaction.support.AbstractPlatformTransactionManage
|
|||||||
import org.springframework.transaction.support.DefaultTransactionStatus;
|
import org.springframework.transaction.support.DefaultTransactionStatus;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @see org.alfresco.util.transaction.SpringAwareUserTransaction
|
|
||||||
*
|
|
||||||
* @author Derek Hulley
|
* @author Derek Hulley
|
||||||
|
* @see org.alfresco.util.transaction.SpringAwareUserTransaction
|
||||||
*/
|
*/
|
||||||
public class SpringAwareUserTransactionTest extends TestCase
|
public class SpringAwareUserTransactionTest extends TestCase
|
||||||
{
|
{
|
||||||
private DummyTransactionManager transactionManager;
|
private DummyTransactionManager transactionManager;
|
||||||
private FailingTransactionManager failingTransactionManager;
|
private FailingTransactionManager failingTransactionManager;
|
||||||
private UserTransaction txn;
|
private UserTransaction txn;
|
||||||
|
|
||||||
public SpringAwareUserTransactionTest()
|
public SpringAwareUserTransactionTest()
|
||||||
{
|
{
|
||||||
super();
|
super();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void setUp() throws Exception
|
protected void setUp() throws Exception
|
||||||
{
|
{
|
||||||
@@ -57,7 +54,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
failingTransactionManager = new FailingTransactionManager();
|
failingTransactionManager = new FailingTransactionManager();
|
||||||
txn = getTxn();
|
txn = getTxn();
|
||||||
}
|
}
|
||||||
|
|
||||||
private UserTransaction getTxn()
|
private UserTransaction getTxn()
|
||||||
{
|
{
|
||||||
return new SpringAwareUserTransaction(
|
return new SpringAwareUserTransaction(
|
||||||
@@ -67,13 +64,13 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
TransactionDefinition.PROPAGATION_REQUIRED,
|
TransactionDefinition.PROPAGATION_REQUIRED,
|
||||||
TransactionDefinition.TIMEOUT_DEFAULT);
|
TransactionDefinition.TIMEOUT_DEFAULT);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testSetUp() throws Exception
|
public void testSetUp() throws Exception
|
||||||
{
|
{
|
||||||
assertNotNull(transactionManager);
|
assertNotNull(transactionManager);
|
||||||
assertNotNull(txn);
|
assertNotNull(txn);
|
||||||
}
|
}
|
||||||
|
|
||||||
private void checkNoStatusOnThread()
|
private void checkNoStatusOnThread()
|
||||||
{
|
{
|
||||||
try
|
try
|
||||||
@@ -86,7 +83,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
// expected
|
// expected
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testNoTxnStatus() throws Exception
|
public void testNoTxnStatus() throws Exception
|
||||||
{
|
{
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
@@ -134,7 +131,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
}
|
}
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testSimpleTxnWithRollback() throws Exception
|
public void testSimpleTxnWithRollback() throws Exception
|
||||||
{
|
{
|
||||||
testNoTxnStatus();
|
testNoTxnStatus();
|
||||||
@@ -156,7 +153,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
transactionManager.getStatus());
|
transactionManager.getStatus());
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testNoBeginCommit() throws Exception
|
public void testNoBeginCommit() throws Exception
|
||||||
{
|
{
|
||||||
testNoTxnStatus();
|
testNoTxnStatus();
|
||||||
@@ -171,7 +168,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
}
|
}
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testPostRollbackCommitDetection() throws Exception
|
public void testPostRollbackCommitDetection() throws Exception
|
||||||
{
|
{
|
||||||
testNoTxnStatus();
|
testNoTxnStatus();
|
||||||
@@ -189,7 +186,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
}
|
}
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testPostSetRollbackOnlyCommitDetection() throws Exception
|
public void testPostSetRollbackOnlyCommitDetection() throws Exception
|
||||||
{
|
{
|
||||||
testNoTxnStatus();
|
testNoTxnStatus();
|
||||||
@@ -208,7 +205,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
}
|
}
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMismatchedBeginCommit() throws Exception
|
public void testMismatchedBeginCommit() throws Exception
|
||||||
{
|
{
|
||||||
UserTransaction txn1 = getTxn();
|
UserTransaction txn1 = getTxn();
|
||||||
@@ -218,18 +215,18 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
|
|
||||||
txn1.begin();
|
txn1.begin();
|
||||||
txn2.begin();
|
txn2.begin();
|
||||||
|
|
||||||
txn2.commit();
|
txn2.commit();
|
||||||
txn1.commit();
|
txn1.commit();
|
||||||
|
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
|
|
||||||
txn1 = getTxn();
|
txn1 = getTxn();
|
||||||
txn2 = getTxn();
|
txn2 = getTxn();
|
||||||
|
|
||||||
txn1.begin();
|
txn1.begin();
|
||||||
txn2.begin();
|
txn2.begin();
|
||||||
|
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
txn1.commit();
|
txn1.commit();
|
||||||
@@ -245,58 +242,6 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Test for leaked transactions (no guarantee it will succeed due to reliance
|
|
||||||
* on garbage collector), so disabled by default.
|
|
||||||
*
|
|
||||||
* Also, if it succeeds, transaction call stack tracing will be enabled
|
|
||||||
* potentially hitting the performance of all subsequent tests.
|
|
||||||
*
|
|
||||||
* @throws Exception
|
|
||||||
*/
|
|
||||||
public void xtestLeakedTransactionLogging() throws Exception
|
|
||||||
{
|
|
||||||
assertFalse(SpringAwareUserTransaction.isCallStackTraced());
|
|
||||||
|
|
||||||
TrxThread t1 = new TrxThread();
|
|
||||||
t1.start();
|
|
||||||
System.gc();
|
|
||||||
Thread.sleep(1000);
|
|
||||||
|
|
||||||
TrxThread t2 = new TrxThread();
|
|
||||||
t2.start();
|
|
||||||
System.gc();
|
|
||||||
Thread.sleep(1000);
|
|
||||||
|
|
||||||
assertTrue(SpringAwareUserTransaction.isCallStackTraced());
|
|
||||||
|
|
||||||
TrxThread t3 = new TrxThread();
|
|
||||||
t3.start();
|
|
||||||
System.gc();
|
|
||||||
Thread.sleep(3000);
|
|
||||||
System.gc();
|
|
||||||
Thread.sleep(3000);
|
|
||||||
}
|
|
||||||
|
|
||||||
private class TrxThread extends Thread
|
|
||||||
{
|
|
||||||
public void run()
|
|
||||||
{
|
|
||||||
try
|
|
||||||
{
|
|
||||||
getTrx();
|
|
||||||
}
|
|
||||||
catch (Exception e) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void getTrx() throws Exception
|
|
||||||
{
|
|
||||||
UserTransaction txn = getTxn();
|
|
||||||
txn.begin();
|
|
||||||
txn = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testConnectionPoolException() throws Exception
|
public void testConnectionPoolException() throws Exception
|
||||||
{
|
{
|
||||||
testNoTxnStatus();
|
testNoTxnStatus();
|
||||||
@@ -311,7 +256,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
// Expected fail
|
// Expected fail
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private UserTransaction getFailingTxn()
|
private UserTransaction getFailingTxn()
|
||||||
{
|
{
|
||||||
return new SpringAwareUserTransaction(
|
return new SpringAwareUserTransaction(
|
||||||
@@ -321,7 +266,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
TransactionDefinition.PROPAGATION_REQUIRED,
|
TransactionDefinition.PROPAGATION_REQUIRED,
|
||||||
TransactionDefinition.TIMEOUT_DEFAULT);
|
TransactionDefinition.TIMEOUT_DEFAULT);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testTransactionListenerOrder() throws Throwable
|
public void testTransactionListenerOrder() throws Throwable
|
||||||
{
|
{
|
||||||
testNoTxnStatus();
|
testNoTxnStatus();
|
||||||
@@ -360,12 +305,12 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
}
|
}
|
||||||
checkNoStatusOnThread();
|
checkNoStatusOnThread();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static class TestTransactionListener extends TransactionListenerAdapter
|
private static class TestTransactionListener extends TransactionListenerAdapter
|
||||||
{
|
{
|
||||||
private final String name;
|
private final String name;
|
||||||
private final StringBuffer buffer;
|
private final StringBuffer buffer;
|
||||||
|
|
||||||
public TestTransactionListener(String name, StringBuffer buffer)
|
public TestTransactionListener(String name, StringBuffer buffer)
|
||||||
{
|
{
|
||||||
Objects.requireNonNull(name);
|
Objects.requireNonNull(name);
|
||||||
@@ -373,18 +318,18 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
this.name = name;
|
this.name = name;
|
||||||
this.buffer = buffer;
|
this.buffer = buffer;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void beforeCommit(boolean readOnly)
|
public void beforeCommit(boolean readOnly)
|
||||||
{
|
{
|
||||||
buffer.append(name);
|
buffer.append(name);
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getName()
|
public String getName()
|
||||||
{
|
{
|
||||||
return name;
|
return name;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object obj)
|
public boolean equals(Object obj)
|
||||||
{
|
{
|
||||||
@@ -394,17 +339,17 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int hashCode()
|
public int hashCode()
|
||||||
{
|
{
|
||||||
return name.hashCode();
|
return name.hashCode();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Used to check that the transaction manager is being called correctly
|
* Used to check that the transaction manager is being called correctly
|
||||||
*
|
*
|
||||||
* @author Derek Hulley
|
* @author Derek Hulley
|
||||||
*/
|
*/
|
||||||
@SuppressWarnings("serial")
|
@SuppressWarnings("serial")
|
||||||
@@ -412,7 +357,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
{
|
{
|
||||||
private int status = Status.STATUS_NO_TRANSACTION;
|
private int status = Status.STATUS_NO_TRANSACTION;
|
||||||
private Object txn = new Object();
|
private Object txn = new Object();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @return Returns one of the {@link Status Status.STATUS_XXX} constants
|
* @return Returns one of the {@link Status Status.STATUS_XXX} constants
|
||||||
*/
|
*/
|
||||||
@@ -441,10 +386,10 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
status = Status.STATUS_ROLLEDBACK;
|
status = Status.STATUS_ROLLEDBACK;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Throws {@link NoSuchElementException} on begin()
|
* Throws {@link NoSuchElementException} on begin()
|
||||||
*
|
*
|
||||||
* @author alex.mukha
|
* @author alex.mukha
|
||||||
*/
|
*/
|
||||||
private static class FailingTransactionManager extends AbstractPlatformTransactionManager
|
private static class FailingTransactionManager extends AbstractPlatformTransactionManager
|
||||||
@@ -452,7 +397,7 @@ public class SpringAwareUserTransactionTest extends TestCase
|
|||||||
private static final long serialVersionUID = 1L;
|
private static final long serialVersionUID = 1L;
|
||||||
private int status = Status.STATUS_NO_TRANSACTION;
|
private int status = Status.STATUS_NO_TRANSACTION;
|
||||||
private Object txn = new Object();
|
private Object txn = new Object();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @return Returns one of the {@link Status Status.STATUS_XXX} constants
|
* @return Returns one of the {@link Status Status.STATUS_XXX} constants
|
||||||
*/
|
*/
|
||||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,363 +1,354 @@
|
|||||||
/*
|
/*
|
||||||
* #%L
|
* #%L
|
||||||
* Alfresco Repository
|
* Alfresco Repository
|
||||||
* %%
|
* %%
|
||||||
* Copyright (C) 2005 - 2023 Alfresco Software Limited
|
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||||
* %%
|
* %%
|
||||||
* This file is part of the Alfresco software.
|
* This file is part of the Alfresco software.
|
||||||
* If the software was purchased under a paid Alfresco license, the terms of
|
* If the software was purchased under a paid Alfresco license, the terms of
|
||||||
* the paid license agreement will prevail. Otherwise, the software is
|
* the paid license agreement will prevail. Otherwise, the software is
|
||||||
* provided under the following open source license terms:
|
* provided under the following open source license terms:
|
||||||
*
|
*
|
||||||
* Alfresco is free software: you can redistribute it and/or modify
|
* Alfresco is free software: you can redistribute it and/or modify
|
||||||
* it under the terms of the GNU Lesser General Public License as published by
|
* it under the terms of the GNU Lesser General Public License as published by
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
* the Free Software Foundation, either version 3 of the License, or
|
||||||
* (at your option) any later version.
|
* (at your option) any later version.
|
||||||
*
|
*
|
||||||
* Alfresco is distributed in the hope that it will be useful,
|
* Alfresco is distributed in the hope that it will be useful,
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
* GNU Lesser General Public License for more details.
|
* GNU Lesser General Public License for more details.
|
||||||
*
|
*
|
||||||
* You should have received a copy of the GNU Lesser General Public License
|
* You should have received a copy of the GNU Lesser General Public License
|
||||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||||
* #L%
|
* #L%
|
||||||
*/
|
*/
|
||||||
/*
|
package org.alfresco.repo.action.executer;
|
||||||
* Copyright (C) 2005 Jesper Steen M<>ller
|
|
||||||
*
|
import static org.awaitility.Awaitility.await;
|
||||||
* This file is part of Alfresco
|
|
||||||
*
|
import java.io.Serializable;
|
||||||
* Alfresco is free software: you can redistribute it and/or modify
|
import java.util.HashMap;
|
||||||
* it under the terms of the GNU Lesser General Public License as published by
|
import java.util.Map;
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
import java.util.Objects;
|
||||||
* (at your option) any later version.
|
import java.util.Properties;
|
||||||
*
|
import java.util.Set;
|
||||||
* Alfresco is distributed in the hope that it will be useful,
|
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
import org.junit.Before;
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
import org.junit.Test;
|
||||||
* GNU Lesser General Public License for more details.
|
import org.junit.experimental.categories.Category;
|
||||||
*
|
import org.springframework.test.context.transaction.TestTransaction;
|
||||||
* You should have received a copy of the GNU Lesser General Public License
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
*/
|
import org.alfresco.model.ContentModel;
|
||||||
package org.alfresco.repo.action.executer;
|
import org.alfresco.repo.action.ActionImpl;
|
||||||
|
import org.alfresco.repo.content.MimetypeMap;
|
||||||
import org.alfresco.model.ContentModel;
|
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
|
||||||
import org.alfresco.repo.action.ActionImpl;
|
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
|
||||||
import org.alfresco.repo.content.MimetypeMap;
|
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
|
||||||
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
|
import org.alfresco.repo.security.authentication.AuthenticationComponent;
|
||||||
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
|
import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
||||||
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
|
import org.alfresco.service.cmr.repository.ContentReader;
|
||||||
import org.alfresco.repo.security.authentication.AuthenticationComponent;
|
import org.alfresco.service.cmr.repository.ContentService;
|
||||||
import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
import org.alfresco.service.cmr.repository.ContentWriter;
|
||||||
import org.alfresco.service.cmr.repository.ContentReader;
|
import org.alfresco.service.cmr.repository.NodeRef;
|
||||||
import org.alfresco.service.cmr.repository.ContentService;
|
import org.alfresco.service.cmr.repository.NodeService;
|
||||||
import org.alfresco.service.cmr.repository.ContentWriter;
|
import org.alfresco.service.cmr.repository.StoreRef;
|
||||||
import org.alfresco.service.cmr.repository.NodeRef;
|
import org.alfresco.service.namespace.NamespaceService;
|
||||||
import org.alfresco.service.cmr.repository.NodeService;
|
import org.alfresco.service.namespace.QName;
|
||||||
import org.alfresco.service.cmr.repository.StoreRef;
|
import org.alfresco.service.transaction.TransactionService;
|
||||||
import org.alfresco.service.namespace.NamespaceService;
|
import org.alfresco.test_category.BaseSpringTestsCategory;
|
||||||
import org.alfresco.service.namespace.QName;
|
import org.alfresco.util.BaseSpringTest;
|
||||||
import org.alfresco.service.transaction.TransactionService;
|
import org.alfresco.util.GUID;
|
||||||
import org.alfresco.test_category.BaseSpringTestsCategory;
|
|
||||||
import org.alfresco.util.BaseSpringTest;
|
/**
|
||||||
import org.alfresco.util.GUID;
|
* Test of the ActionExecuter for extracting metadata.
|
||||||
import org.junit.Before;
|
*
|
||||||
import org.junit.Test;
|
* @author Jesper Steen Møller
|
||||||
import org.junit.experimental.categories.Category;
|
*/
|
||||||
import org.springframework.test.context.transaction.TestTransaction;
|
@Category(BaseSpringTestsCategory.class)
|
||||||
import org.springframework.transaction.annotation.Transactional;
|
@Transactional
|
||||||
|
public class ContentMetadataExtracterTest extends BaseSpringTest
|
||||||
import java.io.Serializable;
|
{
|
||||||
import java.util.HashMap;
|
protected static final String QUICK_TITLE = "The quick brown fox jumps over the lazy dog";
|
||||||
import java.util.Map;
|
protected static final String QUICK_DESCRIPTION = "Pangram, fox, dog, Gym class featuring a brown fox and lazy dog";
|
||||||
import java.util.Properties;
|
protected static final String QUICK_CREATOR = "Nevin Nollop";
|
||||||
import java.util.Set;
|
|
||||||
|
private NodeService nodeService;
|
||||||
/**
|
private ContentService contentService;
|
||||||
* Test of the ActionExecuter for extracting metadata.
|
private MetadataExtracterRegistry registry;
|
||||||
*
|
private TransactionService transactionService;
|
||||||
* @author Jesper Steen Møller
|
private StoreRef testStoreRef;
|
||||||
*/
|
private NodeRef rootNodeRef;
|
||||||
@Category(BaseSpringTestsCategory.class)
|
private NodeRef nodeRef;
|
||||||
@Transactional
|
|
||||||
public class ContentMetadataExtracterTest extends BaseSpringTest
|
private ContentMetadataExtracter executer;
|
||||||
{
|
|
||||||
protected static final String QUICK_TITLE = "The quick brown fox jumps over the lazy dog";
|
private final static String ID = GUID.generate();
|
||||||
protected static final String QUICK_DESCRIPTION = "Pangram, fox, dog, Gym class featuring a brown fox and lazy dog";
|
|
||||||
protected static final String QUICK_CREATOR = "Nevin Nollop";
|
@Before
|
||||||
|
public void before() throws Exception
|
||||||
private NodeService nodeService;
|
{
|
||||||
private ContentService contentService;
|
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
|
||||||
private MetadataExtracterRegistry registry;
|
this.contentService = (ContentService) this.applicationContext.getBean("contentService");
|
||||||
private TransactionService transactionService;
|
registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
|
||||||
private StoreRef testStoreRef;
|
transactionService = (TransactionService) this.applicationContext.getBean("transactionService");
|
||||||
private NodeRef rootNodeRef;
|
|
||||||
private NodeRef nodeRef;
|
AuthenticationComponent authenticationComponent = (AuthenticationComponent) applicationContext.getBean("authenticationComponent");
|
||||||
|
authenticationComponent.setSystemUserAsCurrentUser();
|
||||||
private ContentMetadataExtracter executer;
|
|
||||||
|
// Create the store and get the root node
|
||||||
private final static String ID = GUID.generate();
|
this.testStoreRef = this.nodeService.createStore(
|
||||||
|
StoreRef.PROTOCOL_WORKSPACE,
|
||||||
@Before
|
"Test_" + System.currentTimeMillis());
|
||||||
public void before() throws Exception
|
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);
|
||||||
{
|
|
||||||
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
|
// Create the node used for tests
|
||||||
this.contentService = (ContentService) this.applicationContext.getBean("contentService");
|
this.nodeRef = this.nodeService.createNode(
|
||||||
registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
|
this.rootNodeRef, ContentModel.ASSOC_CHILDREN,
|
||||||
transactionService = (TransactionService) this.applicationContext.getBean("transactionService");
|
QName.createQName("{test}testnode"),
|
||||||
|
ContentModel.TYPE_CONTENT).getChildRef();
|
||||||
AuthenticationComponent authenticationComponent = (AuthenticationComponent)applicationContext.getBean("authenticationComponent");
|
|
||||||
authenticationComponent.setSystemUserAsCurrentUser();
|
// Setup the content from the PDF test data
|
||||||
|
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
|
||||||
// Create the store and get the root node
|
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
|
||||||
this.testStoreRef = this.nodeService.createStore(
|
cw.putContent(AbstractContentTransformerTest.loadQuickTestFile("pdf"));
|
||||||
StoreRef.PROTOCOL_WORKSPACE,
|
|
||||||
"Test_" + System.currentTimeMillis());
|
// Get the executer instance
|
||||||
this.rootNodeRef = this.nodeService.getRootNode(this.testStoreRef);
|
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
|
||||||
|
}
|
||||||
// Create the node used for tests
|
|
||||||
this.nodeRef = this.nodeService.createNode(
|
/**
|
||||||
this.rootNodeRef, ContentModel.ASSOC_CHILDREN,
|
* Test execution of the extraction itself
|
||||||
QName.createQName("{test}testnode"),
|
*/
|
||||||
ContentModel.TYPE_CONTENT).getChildRef();
|
@Test
|
||||||
|
public void testFromBlanks() throws Exception
|
||||||
// Setup the content from the PDF test data
|
{
|
||||||
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
|
// Test that the action writes properties when they don't exist or are
|
||||||
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
|
// unset
|
||||||
cw.putContent(AbstractContentTransformerTest.loadQuickTestFile("pdf"));
|
|
||||||
|
// Get the old props
|
||||||
// Get the executer instance
|
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
|
||||||
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
|
props.remove(ContentModel.PROP_AUTHOR);
|
||||||
}
|
props.put(ContentModel.PROP_TITLE, "");
|
||||||
|
props.put(ContentModel.PROP_DESCRIPTION, null); // Wonder how this will
|
||||||
/**
|
// be handled
|
||||||
* Test execution of the extraction itself
|
this.nodeService.setProperties(this.nodeRef, props);
|
||||||
*/
|
|
||||||
@Test
|
// Make the nodeRef visible to other transactions as it will need to be in async requests
|
||||||
public void testFromBlanks() throws Exception
|
TestTransaction.flagForCommit();
|
||||||
{
|
TestTransaction.end();
|
||||||
// Test that the action writes properties when they don't exist or are
|
|
||||||
// unset
|
// Execute the action
|
||||||
|
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
|
||||||
// Get the old props
|
public Void execute() throws Throwable
|
||||||
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
|
{
|
||||||
props.remove(ContentModel.PROP_AUTHOR);
|
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
||||||
props.put(ContentModel.PROP_TITLE, "");
|
executer.execute(action, nodeRef);
|
||||||
props.put(ContentModel.PROP_DESCRIPTION, null); // Wonder how this will
|
return null;
|
||||||
// be handled
|
}
|
||||||
this.nodeService.setProperties(this.nodeRef, props);
|
});
|
||||||
|
|
||||||
// Make the nodeRef visible to other transactions as it will need to be in async requests
|
// Need to wait for the async extract
|
||||||
TestTransaction.flagForCommit();
|
await().pollInSameThread()
|
||||||
TestTransaction.end();
|
.atMost(MAX_ASYNC_TIMEOUT)
|
||||||
|
.until(() -> nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION), Objects::nonNull);
|
||||||
// Execute the action
|
|
||||||
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
|
// Check that the properties have been set
|
||||||
{
|
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
|
||||||
public Void execute() throws Throwable
|
public Void execute() throws Throwable
|
||||||
{
|
{
|
||||||
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
assertEquals(QUICK_TITLE, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
|
||||||
executer.execute(action, nodeRef);
|
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
|
||||||
return null;
|
assertEquals(QUICK_CREATOR, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
|
||||||
}
|
return null;
|
||||||
});
|
}
|
||||||
|
});
|
||||||
Thread.sleep(3000); // Need to wait for the async extract
|
}
|
||||||
|
|
||||||
// Check that the properties have been set
|
private static final QName PROP_UNKNOWN_1 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown1");
|
||||||
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
|
private static final QName PROP_UNKNOWN_2 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown2");
|
||||||
{
|
|
||||||
public Void execute() throws Throwable
|
private static class TestUnknownMetadataExtracter extends AbstractMappingMetadataExtracter
|
||||||
{
|
{
|
||||||
assertEquals(QUICK_TITLE, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
|
public TestUnknownMetadataExtracter()
|
||||||
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
|
{
|
||||||
assertEquals(QUICK_CREATOR, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
|
Properties mappingProperties = new Properties();
|
||||||
return null;
|
mappingProperties.put("unknown1", PROP_UNKNOWN_1.toString());
|
||||||
}
|
mappingProperties.put("unknown2", PROP_UNKNOWN_2.toString());
|
||||||
});
|
setMappingProperties(mappingProperties);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static final QName PROP_UNKNOWN_1 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown1");
|
@Override
|
||||||
private static final QName PROP_UNKNOWN_2 = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "unkown2");
|
protected Map<String, Set<QName>> getDefaultMapping()
|
||||||
private static class TestUnknownMetadataExtracter extends AbstractMappingMetadataExtracter
|
{
|
||||||
{
|
// No need to give anything back as we have explicitly set the mapping already
|
||||||
public TestUnknownMetadataExtracter()
|
return new HashMap<String, Set<QName>>(0);
|
||||||
{
|
}
|
||||||
Properties mappingProperties = new Properties();
|
|
||||||
mappingProperties.put("unknown1", PROP_UNKNOWN_1.toString());
|
@Override
|
||||||
mappingProperties.put("unknown2", PROP_UNKNOWN_2.toString());
|
public boolean isSupported(String sourceMimetype)
|
||||||
setMappingProperties(mappingProperties);
|
{
|
||||||
}
|
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
|
||||||
@Override
|
}
|
||||||
protected Map<String, Set<QName>> getDefaultMapping()
|
|
||||||
{
|
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
|
||||||
// No need to give anything back as we have explicitly set the mapping already
|
{
|
||||||
return new HashMap<String, Set<QName>>(0);
|
Map<String, Serializable> rawMap = newRawMap();
|
||||||
}
|
rawMap.put("unknown1", Integer.valueOf(1));
|
||||||
@Override
|
rawMap.put("unknown2", "TWO");
|
||||||
public boolean isSupported(String sourceMimetype)
|
return rawMap;
|
||||||
{
|
}
|
||||||
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
|
}
|
||||||
}
|
|
||||||
|
@Test
|
||||||
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
|
public void testUnknownProperties()
|
||||||
{
|
{
|
||||||
Map<String, Serializable> rawMap = newRawMap();
|
TestUnknownMetadataExtracter extracterUnknown = new TestUnknownMetadataExtracter();
|
||||||
rawMap.put("unknown1", Integer.valueOf(1));
|
extracterUnknown.setRegistry(registry);
|
||||||
rawMap.put("unknown2", "TWO");
|
extracterUnknown.register();
|
||||||
return rawMap;
|
// Now add some content with a binary mimetype
|
||||||
}
|
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
|
||||||
}
|
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
|
||||||
|
cw.putContent("Content for " + getName());
|
||||||
@Test
|
|
||||||
public void testUnknownProperties()
|
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
||||||
{
|
executer.execute(action, this.nodeRef);
|
||||||
TestUnknownMetadataExtracter extracterUnknown = new TestUnknownMetadataExtracter();
|
|
||||||
extracterUnknown.setRegistry(registry);
|
// The unkown properties should be present
|
||||||
extracterUnknown.register();
|
Serializable prop1 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_1);
|
||||||
// Now add some content with a binary mimetype
|
Serializable prop2 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_2);
|
||||||
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
|
|
||||||
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
|
assertNotNull("Unknown property is null", prop1);
|
||||||
cw.putContent("Content for " + getName());
|
assertNotNull("Unknown property is null", prop2);
|
||||||
|
}
|
||||||
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
|
||||||
executer.execute(action, this.nodeRef);
|
private static class TestNullPropMetadataExtracter extends AbstractMappingMetadataExtracter
|
||||||
|
{
|
||||||
// The unkown properties should be present
|
public TestNullPropMetadataExtracter()
|
||||||
Serializable prop1 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_1);
|
{
|
||||||
Serializable prop2 = nodeService.getProperty(nodeRef, PROP_UNKNOWN_2);
|
Properties mappingProperties = new Properties();
|
||||||
|
mappingProperties.put("title", ContentModel.PROP_TITLE.toString());
|
||||||
assertNotNull("Unknown property is null", prop1);
|
mappingProperties.put("description", ContentModel.PROP_DESCRIPTION.toString());
|
||||||
assertNotNull("Unknown property is null", prop2);
|
setMappingProperties(mappingProperties);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static class TestNullPropMetadataExtracter extends AbstractMappingMetadataExtracter
|
@Override
|
||||||
{
|
protected Map<String, Set<QName>> getDefaultMapping()
|
||||||
public TestNullPropMetadataExtracter()
|
{
|
||||||
{
|
// No need to give anything back as we have explicitly set the mapping already
|
||||||
Properties mappingProperties = new Properties();
|
return new HashMap<String, Set<QName>>(0);
|
||||||
mappingProperties.put("title", ContentModel.PROP_TITLE.toString());
|
}
|
||||||
mappingProperties.put("description", ContentModel.PROP_DESCRIPTION.toString());
|
|
||||||
setMappingProperties(mappingProperties);
|
@Override
|
||||||
}
|
public boolean isSupported(String sourceMimetype)
|
||||||
@Override
|
{
|
||||||
protected Map<String, Set<QName>> getDefaultMapping()
|
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
|
||||||
{
|
}
|
||||||
// No need to give anything back as we have explicitly set the mapping already
|
|
||||||
return new HashMap<String, Set<QName>>(0);
|
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
|
||||||
}
|
{
|
||||||
@Override
|
Map<String, Serializable> rawMap = newRawMap();
|
||||||
public boolean isSupported(String sourceMimetype)
|
putRawValue("title", null, rawMap);
|
||||||
{
|
putRawValue("description", "", rawMap);
|
||||||
return sourceMimetype.equals(MimetypeMap.MIMETYPE_BINARY);
|
return rawMap;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
|
|
||||||
{
|
/**
|
||||||
Map<String, Serializable> rawMap = newRawMap();
|
* Ensure that missing raw values result in node properties being removed when running with {@link ContentMetadataExtracter#setCarryAspectProperties(boolean)} set to <tt>false</tt>.
|
||||||
putRawValue("title", null, rawMap);
|
*/
|
||||||
putRawValue("description", "", rawMap);
|
@Test
|
||||||
return rawMap;
|
public void testNullExtractedValues_ALF1823()
|
||||||
}
|
{
|
||||||
}
|
TestNullPropMetadataExtracter extractor = new TestNullPropMetadataExtracter();
|
||||||
|
extractor.setRegistry(registry);
|
||||||
/**
|
extractor.register();
|
||||||
* Ensure that missing raw values result in node properties being removed
|
// Now set the title and description
|
||||||
* when running with {@link ContentMetadataExtracter#setCarryAspectProperties(boolean)}
|
nodeService.setProperty(nodeRef, ContentModel.PROP_TITLE, "TITLE");
|
||||||
* set to <tt>false</tt>.
|
nodeService.setProperty(nodeRef, ContentModel.PROP_DESCRIPTION, "DESCRIPTION");
|
||||||
*/
|
// Now add some content with a binary mimetype
|
||||||
@Test
|
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
|
||||||
public void testNullExtractedValues_ALF1823()
|
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
|
||||||
{
|
cw.putContent("Content for " + getName());
|
||||||
TestNullPropMetadataExtracter extractor = new TestNullPropMetadataExtracter();
|
|
||||||
extractor.setRegistry(registry);
|
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
||||||
extractor.register();
|
executer.execute(action, this.nodeRef);
|
||||||
// Now set the title and description
|
|
||||||
nodeService.setProperty(nodeRef, ContentModel.PROP_TITLE, "TITLE");
|
// cm:titled properties should be present
|
||||||
nodeService.setProperty(nodeRef, ContentModel.PROP_DESCRIPTION, "DESCRIPTION");
|
Serializable title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
|
||||||
// Now add some content with a binary mimetype
|
Serializable descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
|
||||||
ContentWriter cw = this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
|
|
||||||
cw.setMimetype(MimetypeMap.MIMETYPE_BINARY);
|
assertNotNull("cm:title property is null", title);
|
||||||
cw.putContent("Content for " + getName());
|
assertNotNull("cm:description property is null", descr);
|
||||||
|
|
||||||
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
try
|
||||||
executer.execute(action, this.nodeRef);
|
{
|
||||||
|
// Now change the setting to remove unset aspect properties
|
||||||
// cm:titled properties should be present
|
executer.setCarryAspectProperties(false);
|
||||||
Serializable title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
|
// Extract again
|
||||||
Serializable descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
|
executer.execute(action, this.nodeRef);
|
||||||
|
|
||||||
assertNotNull("cm:title property is null", title);
|
// cm:titled properties should *NOT* be present
|
||||||
assertNotNull("cm:description property is null", descr);
|
title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
|
||||||
|
descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
|
||||||
try
|
|
||||||
{
|
assertNull("cm:title property is not null", title);
|
||||||
// Now change the setting to remove unset aspect properties
|
assertNull("cm:description property is not null", descr);
|
||||||
executer.setCarryAspectProperties(false);
|
}
|
||||||
// Extract again
|
finally
|
||||||
executer.execute(action, this.nodeRef);
|
{
|
||||||
|
executer.setCarryAspectProperties(true);
|
||||||
// cm:titled properties should *NOT* be present
|
}
|
||||||
title = nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE);
|
}
|
||||||
descr = nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION);
|
|
||||||
|
/**
|
||||||
assertNull("cm:title property is not null", title);
|
* Test execution of the pragmatic approach
|
||||||
assertNull("cm:description property is not null", descr);
|
*/
|
||||||
}
|
@Test
|
||||||
finally
|
public void testFromPartial() throws Exception
|
||||||
{
|
{
|
||||||
executer.setCarryAspectProperties(true);
|
// Test that the action does not overwrite properties that are already
|
||||||
}
|
// set
|
||||||
}
|
String myCreator = "Null-op";
|
||||||
|
String myTitle = "The hot dog is eaten by the city fox";
|
||||||
/**
|
|
||||||
* Test execution of the pragmatic approach
|
// Get the old props
|
||||||
*/
|
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
|
||||||
@Test
|
props.put(ContentModel.PROP_AUTHOR, myCreator);
|
||||||
public void testFromPartial() throws Exception
|
props.put(ContentModel.PROP_TITLE, myTitle);
|
||||||
{
|
props.remove(ContentModel.PROP_DESCRIPTION); // Allow this baby
|
||||||
// Test that the action does not overwrite properties that are already
|
this.nodeService.setProperties(this.nodeRef, props);
|
||||||
// set
|
|
||||||
String myCreator = "Null-op";
|
// Make the nodeRef visible to other transactions as it will need to be in async requests
|
||||||
String myTitle = "The hot dog is eaten by the city fox";
|
TestTransaction.flagForCommit();
|
||||||
|
TestTransaction.end();
|
||||||
// Get the old props
|
|
||||||
Map<QName, Serializable> props = this.nodeService.getProperties(this.nodeRef);
|
// Execute the action
|
||||||
props.put(ContentModel.PROP_AUTHOR, myCreator);
|
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
|
||||||
props.put(ContentModel.PROP_TITLE, myTitle);
|
public Void execute() throws Throwable
|
||||||
props.remove(ContentModel.PROP_DESCRIPTION); // Allow this baby
|
{
|
||||||
this.nodeService.setProperties(this.nodeRef, props);
|
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
||||||
|
executer.execute(action, nodeRef);
|
||||||
// Make the nodeRef visible to other transactions as it will need to be in async requests
|
return null;
|
||||||
TestTransaction.flagForCommit();
|
}
|
||||||
TestTransaction.end();
|
});
|
||||||
|
|
||||||
// Execute the action
|
// Need to wait for the async extract
|
||||||
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
|
await().pollInSameThread()
|
||||||
{
|
.atMost(MAX_ASYNC_TIMEOUT)
|
||||||
public Void execute() throws Throwable
|
.until(() -> nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION), Objects::nonNull);
|
||||||
{
|
|
||||||
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
|
// Check that the properties have been preserved, but that description has been set
|
||||||
executer.execute(action, nodeRef);
|
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>() {
|
||||||
return null;
|
public Void execute() throws Throwable
|
||||||
}
|
{
|
||||||
});
|
assertEquals(myTitle, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
|
||||||
|
assertEquals(myCreator, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
|
||||||
Thread.sleep(3000); // Need to wait for the async extract
|
|
||||||
|
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
|
||||||
// Check that the properties have been preserved, but that description has been set
|
return null;
|
||||||
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
|
}
|
||||||
{
|
});
|
||||||
public Void execute() throws Throwable
|
}
|
||||||
{
|
}
|
||||||
assertEquals(myTitle, nodeService.getProperty(nodeRef, ContentModel.PROP_TITLE));
|
|
||||||
assertEquals(myCreator, nodeService.getProperty(nodeRef, ContentModel.PROP_AUTHOR));
|
|
||||||
|
|
||||||
assertEquals(QUICK_DESCRIPTION, nodeService.getProperty(nodeRef, ContentModel.PROP_DESCRIPTION));
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@@ -1,478 +1,475 @@
|
|||||||
/*
|
/*
|
||||||
* #%L
|
* #%L
|
||||||
* Alfresco Repository
|
* Alfresco Repository
|
||||||
* %%
|
* %%
|
||||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||||
* %%
|
* %%
|
||||||
* This file is part of the Alfresco software.
|
* This file is part of the Alfresco software.
|
||||||
* If the software was purchased under a paid Alfresco license, the terms of
|
* If the software was purchased under a paid Alfresco license, the terms of
|
||||||
* the paid license agreement will prevail. Otherwise, the software is
|
* the paid license agreement will prevail. Otherwise, the software is
|
||||||
* provided under the following open source license terms:
|
* provided under the following open source license terms:
|
||||||
*
|
*
|
||||||
* Alfresco is free software: you can redistribute it and/or modify
|
* Alfresco is free software: you can redistribute it and/or modify
|
||||||
* it under the terms of the GNU Lesser General Public License as published by
|
* it under the terms of the GNU Lesser General Public License as published by
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
* the Free Software Foundation, either version 3 of the License, or
|
||||||
* (at your option) any later version.
|
* (at your option) any later version.
|
||||||
*
|
*
|
||||||
* Alfresco is distributed in the hope that it will be useful,
|
* Alfresco is distributed in the hope that it will be useful,
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
* GNU Lesser General Public License for more details.
|
* GNU Lesser General Public License for more details.
|
||||||
*
|
*
|
||||||
* You should have received a copy of the GNU Lesser General Public License
|
* You should have received a copy of the GNU Lesser General Public License
|
||||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||||
* #L%
|
* #L%
|
||||||
*/
|
*/
|
||||||
package org.alfresco.repo.content.caching.cleanup;
|
package org.alfresco.repo.content.caching.cleanup;
|
||||||
|
|
||||||
|
import static org.awaitility.Awaitility.await;
|
||||||
import static org.junit.Assert.*;
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertFalse;
|
||||||
import java.io.File;
|
import static org.junit.Assert.assertTrue;
|
||||||
import java.io.FileNotFoundException;
|
import static org.junit.Assert.fail;
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.PrintWriter;
|
import java.io.File;
|
||||||
import java.util.Calendar;
|
import java.io.FileNotFoundException;
|
||||||
import java.util.GregorianCalendar;
|
import java.io.IOException;
|
||||||
|
import java.io.PrintWriter;
|
||||||
import org.alfresco.repo.content.caching.CacheFileProps;
|
import java.time.Duration;
|
||||||
import org.alfresco.repo.content.caching.CachingContentStore;
|
import java.util.Calendar;
|
||||||
import org.alfresco.repo.content.caching.ContentCacheImpl;
|
import java.util.GregorianCalendar;
|
||||||
import org.alfresco.repo.content.caching.Key;
|
|
||||||
import org.alfresco.service.cmr.repository.ContentReader;
|
import org.apache.commons.io.FileUtils;
|
||||||
import org.alfresco.util.ApplicationContextHelper;
|
import org.junit.Before;
|
||||||
import org.alfresco.util.GUID;
|
import org.junit.BeforeClass;
|
||||||
import org.alfresco.util.testing.category.LuceneTests;
|
import org.junit.Test;
|
||||||
import org.apache.commons.io.FileUtils;
|
import org.junit.experimental.categories.Category;
|
||||||
import org.junit.Before;
|
import org.springframework.context.ApplicationContext;
|
||||||
import org.junit.BeforeClass;
|
|
||||||
import org.junit.Test;
|
import org.alfresco.repo.content.caching.CacheFileProps;
|
||||||
import org.junit.experimental.categories.Category;
|
import org.alfresco.repo.content.caching.CachingContentStore;
|
||||||
import org.springframework.context.ApplicationContext;
|
import org.alfresco.repo.content.caching.ContentCacheImpl;
|
||||||
|
import org.alfresco.repo.content.caching.Key;
|
||||||
/**
|
import org.alfresco.service.cmr.repository.ContentReader;
|
||||||
* Tests for the CachedContentCleanupJob
|
import org.alfresco.util.ApplicationContextHelper;
|
||||||
*
|
import org.alfresco.util.GUID;
|
||||||
* @author Matt Ward
|
import org.alfresco.util.testing.category.LuceneTests;
|
||||||
*/
|
|
||||||
@Category(LuceneTests.class)
|
/**
|
||||||
public class CachedContentCleanupJobTest
|
* Tests for the CachedContentCleanupJob
|
||||||
{
|
*
|
||||||
private enum UrlSource { PROPS_FILE, REVERSE_CACHE_LOOKUP, NOT_PRESENT };
|
* @author Matt Ward
|
||||||
private static ApplicationContext ctx;
|
*/
|
||||||
private CachingContentStore cachingStore;
|
@Category(LuceneTests.class)
|
||||||
private ContentCacheImpl cache;
|
public class CachedContentCleanupJobTest
|
||||||
private File cacheRoot;
|
{
|
||||||
private CachedContentCleaner cleaner;
|
|
||||||
|
private static final Duration MAX_WAIT_TIMEOUT = Duration.ofSeconds(10);
|
||||||
|
|
||||||
@BeforeClass
|
private enum UrlSource
|
||||||
public static void beforeClass()
|
{
|
||||||
{
|
PROPS_FILE, REVERSE_CACHE_LOOKUP, NOT_PRESENT
|
||||||
String cleanerConf = "classpath:cachingstore/test-cleaner-context.xml";
|
}
|
||||||
ctx = ApplicationContextHelper.getApplicationContext(new String[] { cleanerConf });
|
|
||||||
}
|
;
|
||||||
|
|
||||||
|
private static ApplicationContext ctx;
|
||||||
@Before
|
private CachingContentStore cachingStore;
|
||||||
public void setUp() throws IOException
|
private ContentCacheImpl cache;
|
||||||
{
|
private File cacheRoot;
|
||||||
cachingStore = (CachingContentStore) ctx.getBean("cachingContentStore");
|
private CachedContentCleaner cleaner;
|
||||||
cache = (ContentCacheImpl) ctx.getBean("contentCache");
|
|
||||||
cacheRoot = cache.getCacheRoot();
|
@BeforeClass
|
||||||
cleaner = (CachedContentCleaner) ctx.getBean("cachedContentCleaner");
|
public static void beforeClass()
|
||||||
cleaner.setMinFileAgeMillis(0);
|
{
|
||||||
cleaner.setMaxDeleteWatchCount(0);
|
String cleanerConf = "classpath:cachingstore/test-cleaner-context.xml";
|
||||||
|
ctx = ApplicationContextHelper.getApplicationContext(new String[]{cleanerConf});
|
||||||
// Clear the cache from disk and memory
|
}
|
||||||
cache.removeAll();
|
|
||||||
FileUtils.cleanDirectory(cacheRoot);
|
@Before
|
||||||
}
|
public void setUp() throws IOException
|
||||||
|
{
|
||||||
|
cachingStore = (CachingContentStore) ctx.getBean("cachingContentStore");
|
||||||
@Test
|
cache = (ContentCacheImpl) ctx.getBean("contentCache");
|
||||||
public void filesNotInCacheAreDeleted() throws InterruptedException
|
cacheRoot = cache.getCacheRoot();
|
||||||
{
|
cleaner = (CachedContentCleaner) ctx.getBean("cachedContentCleaner");
|
||||||
cleaner.setMaxDeleteWatchCount(0);
|
cleaner.setMinFileAgeMillis(0);
|
||||||
int numFiles = 300; // Must be a multiple of number of UrlSource types being tested
|
cleaner.setMaxDeleteWatchCount(0);
|
||||||
long totalSize = 0; // what is the total size of the sample files?
|
|
||||||
File[] files = new File[numFiles];
|
// Clear the cache from disk and memory
|
||||||
for (int i = 0; i < numFiles; i++)
|
cache.removeAll();
|
||||||
{
|
FileUtils.cleanDirectory(cacheRoot);
|
||||||
// Testing with a number of files. The cached file cleaner will be able to determine the 'original'
|
}
|
||||||
// content URL for each file by either retrieving from the companion properties file, or performing
|
|
||||||
// a 'reverse lookup' in the cache (i.e. cache.contains(Key.forCacheFile(...))), or there will be no
|
@Test
|
||||||
// URL determinable for the file.
|
public void filesNotInCacheAreDeleted() throws InterruptedException
|
||||||
UrlSource urlSource = UrlSource.values()[i % UrlSource.values().length];
|
{
|
||||||
File cacheFile = createCacheFile(urlSource, false);
|
cleaner.setMaxDeleteWatchCount(0);
|
||||||
files[i] = cacheFile;
|
int numFiles = 300; // Must be a multiple of number of UrlSource types being tested
|
||||||
totalSize += cacheFile.length();
|
long totalSize = 0; // what is the total size of the sample files?
|
||||||
}
|
File[] files = new File[numFiles];
|
||||||
|
for (int i = 0; i < numFiles; i++)
|
||||||
// Run cleaner
|
{
|
||||||
cleaner.execute();
|
// Testing with a number of files. The cached file cleaner will be able to determine the 'original'
|
||||||
|
// content URL for each file by either retrieving from the companion properties file, or performing
|
||||||
Thread.sleep(400);
|
// a 'reverse lookup' in the cache (i.e. cache.contains(Key.forCacheFile(...))), or there will be no
|
||||||
while (cleaner.isRunning())
|
// URL determinable for the file.
|
||||||
{
|
UrlSource urlSource = UrlSource.values()[i % UrlSource.values().length];
|
||||||
Thread.sleep(200);
|
File cacheFile = createCacheFile(urlSource, false);
|
||||||
}
|
files[i] = cacheFile;
|
||||||
|
totalSize += cacheFile.length();
|
||||||
// check all files deleted
|
}
|
||||||
for (File file : files)
|
|
||||||
{
|
// Run cleaner
|
||||||
assertFalse("File should have been deleted: " + file, file.exists());
|
cleaner.execute();
|
||||||
}
|
|
||||||
|
await().pollDelay(Duration.ofMillis(100))
|
||||||
assertEquals("Incorrect number of deleted files", numFiles, cleaner.getNumFilesDeleted());
|
.atMost(MAX_WAIT_TIMEOUT)
|
||||||
assertEquals("Incorrect total size of files deleted", totalSize, cleaner.getSizeFilesDeleted());
|
.until(() -> !cleaner.isRunning());
|
||||||
}
|
|
||||||
|
// check all files deleted
|
||||||
|
for (File file : files)
|
||||||
@Test
|
{
|
||||||
public void filesNewerThanMinFileAgeMillisAreNotDeleted() throws InterruptedException
|
assertFalse("File should have been deleted: " + file, file.exists());
|
||||||
{
|
}
|
||||||
final long minFileAge = 5000;
|
|
||||||
cleaner.setMinFileAgeMillis(minFileAge);
|
assertEquals("Incorrect number of deleted files", numFiles, cleaner.getNumFilesDeleted());
|
||||||
cleaner.setMaxDeleteWatchCount(0);
|
assertEquals("Incorrect total size of files deleted", totalSize, cleaner.getSizeFilesDeleted());
|
||||||
int numFiles = 10;
|
}
|
||||||
|
|
||||||
File[] oldFiles = new File[numFiles];
|
@Test
|
||||||
for (int i = 0; i < numFiles; i++)
|
public void filesNewerThanMinFileAgeMillisAreNotDeleted() throws InterruptedException
|
||||||
{
|
{
|
||||||
oldFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
|
final long minFileAge = 5000;
|
||||||
}
|
cleaner.setMinFileAgeMillis(minFileAge);
|
||||||
|
cleaner.setMaxDeleteWatchCount(0);
|
||||||
// Sleep to make sure 'old' files really are older than minFileAgeMillis
|
int numFiles = 10;
|
||||||
Thread.sleep(minFileAge);
|
|
||||||
|
File[] oldFiles = new File[numFiles];
|
||||||
File[] newFiles = new File[numFiles];
|
for (int i = 0; i < numFiles; i++)
|
||||||
long newFilesTotalSize = 0;
|
{
|
||||||
for (int i = 0; i < numFiles; i++)
|
oldFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
|
||||||
{
|
}
|
||||||
newFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
|
|
||||||
newFilesTotalSize += newFiles[i].length();
|
// Sleep to make sure 'old' files really are older than minFileAgeMillis
|
||||||
}
|
Thread.sleep(minFileAge);
|
||||||
|
|
||||||
|
File[] newFiles = new File[numFiles];
|
||||||
// The cleaner must finish before any of the newFiles are older than minFileAge. If the files are too
|
long newFilesTotalSize = 0;
|
||||||
// old the test will fail and it will be necessary to rethink how to test this.
|
for (int i = 0; i < numFiles; i++)
|
||||||
cleaner.execute();
|
{
|
||||||
|
newFiles[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, false);
|
||||||
Thread.sleep(400);
|
newFilesTotalSize += newFiles[i].length();
|
||||||
while (cleaner.isRunning())
|
}
|
||||||
{
|
|
||||||
Thread.sleep(200);
|
// The cleaner must finish before any of the newFiles are older than minFileAge. If the files are too
|
||||||
}
|
// old the test will fail and it will be necessary to rethink how to test this.
|
||||||
|
cleaner.execute();
|
||||||
if (cleaner.getDurationMillis() > minFileAge)
|
|
||||||
{
|
await().pollDelay(Duration.ofMillis(100))
|
||||||
fail("Test unable to complete, since cleaner took " + cleaner.getDurationMillis() + "ms" +
|
.atMost(MAX_WAIT_TIMEOUT)
|
||||||
" which is longer than minFileAge [" + minFileAge + "ms]");
|
.until(() -> !cleaner.isRunning());
|
||||||
}
|
|
||||||
|
if (cleaner.getDurationMillis() > minFileAge)
|
||||||
// check all 'old' files deleted
|
{
|
||||||
for (File file : oldFiles)
|
fail("Test unable to complete, since cleaner took " + cleaner.getDurationMillis() + "ms" +
|
||||||
{
|
" which is longer than minFileAge [" + minFileAge + "ms]");
|
||||||
assertFalse("File should have been deleted: " + file, file.exists());
|
}
|
||||||
}
|
|
||||||
// check all 'new' files still present
|
// check all 'old' files deleted
|
||||||
for (File file : newFiles)
|
for (File file : oldFiles)
|
||||||
{
|
{
|
||||||
assertTrue("File should not have been deleted: " + file, file.exists());
|
assertFalse("File should have been deleted: " + file, file.exists());
|
||||||
}
|
}
|
||||||
|
// check all 'new' files still present
|
||||||
assertEquals("Incorrect number of deleted files", newFiles.length, cleaner.getNumFilesDeleted());
|
for (File file : newFiles)
|
||||||
assertEquals("Incorrect total size of files deleted", newFilesTotalSize, cleaner.getSizeFilesDeleted());
|
{
|
||||||
}
|
assertTrue("File should not have been deleted: " + file, file.exists());
|
||||||
|
}
|
||||||
@Test
|
|
||||||
public void aggressiveCleanReclaimsTargetSpace() throws InterruptedException
|
assertEquals("Incorrect number of deleted files", newFiles.length, cleaner.getNumFilesDeleted());
|
||||||
{
|
assertEquals("Incorrect total size of files deleted", newFilesTotalSize, cleaner.getSizeFilesDeleted());
|
||||||
int numFiles = 30;
|
}
|
||||||
File[] files = new File[numFiles];
|
|
||||||
for (int i = 0; i < numFiles; i++)
|
@Test
|
||||||
{
|
public void aggressiveCleanReclaimsTargetSpace() throws InterruptedException
|
||||||
// Make sure it's in the cache - all the files will be in the cache, so the
|
{
|
||||||
// cleaner won't clean any up once it has finished aggressively reclaiming space.
|
int numFiles = 30;
|
||||||
files[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, true);
|
File[] files = new File[numFiles];
|
||||||
}
|
for (int i = 0; i < numFiles; i++)
|
||||||
|
{
|
||||||
// How much space to reclaim - seven files worth (all files are same size)
|
// Make sure it's in the cache - all the files will be in the cache, so the
|
||||||
long fileSize = files[0].length();
|
// cleaner won't clean any up once it has finished aggressively reclaiming space.
|
||||||
long sevenFilesSize = 7 * fileSize;
|
files[i] = createCacheFile(UrlSource.REVERSE_CACHE_LOOKUP, true);
|
||||||
|
}
|
||||||
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
|
|
||||||
// It will delete the older files aggressively (i.e. the ones prior to the two second sleep) and
|
// How much space to reclaim - seven files worth (all files are same size)
|
||||||
// then will examine the new files for potential deletion.
|
long fileSize = files[0].length();
|
||||||
// Since some of the newer files are not in the cache, it will delete those.
|
long sevenFilesSize = 7 * fileSize;
|
||||||
cleaner.executeAggressive("aggressiveCleanReclaimsTargetSpace()", sevenFilesSize);
|
|
||||||
|
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
|
||||||
Thread.sleep(400);
|
// It will delete the older files aggressively (i.e. the ones prior to the two second sleep) and
|
||||||
while (cleaner.isRunning())
|
// then will examine the new files for potential deletion.
|
||||||
{
|
// Since some of the newer files are not in the cache, it will delete those.
|
||||||
Thread.sleep(200);
|
cleaner.executeAggressive("aggressiveCleanReclaimsTargetSpace()", sevenFilesSize);
|
||||||
}
|
|
||||||
|
Thread.sleep(400);
|
||||||
int numDeleted = 0;
|
while (cleaner.isRunning())
|
||||||
|
{
|
||||||
for (File f : files)
|
Thread.sleep(200);
|
||||||
{
|
}
|
||||||
if (!f.exists())
|
|
||||||
{
|
int numDeleted = 0;
|
||||||
numDeleted++;
|
|
||||||
}
|
for (File f : files)
|
||||||
}
|
{
|
||||||
// How many were definitely deleted?
|
if (!f.exists())
|
||||||
assertEquals("Wrong number of files deleted", 7 , numDeleted);
|
{
|
||||||
|
numDeleted++;
|
||||||
// The cleaner should have recorded the correct number of deletions
|
}
|
||||||
assertEquals("Incorrect number of deleted files", 7, cleaner.getNumFilesDeleted());
|
}
|
||||||
assertEquals("Incorrect total size of files deleted", sevenFilesSize, cleaner.getSizeFilesDeleted());
|
// How many were definitely deleted?
|
||||||
}
|
assertEquals("Wrong number of files deleted", 7, numDeleted);
|
||||||
|
|
||||||
|
// The cleaner should have recorded the correct number of deletions
|
||||||
@Test
|
assertEquals("Incorrect number of deleted files", 7, cleaner.getNumFilesDeleted());
|
||||||
public void standardCleanAfterAggressiveFinished() throws InterruptedException
|
assertEquals("Incorrect total size of files deleted", sevenFilesSize, cleaner.getSizeFilesDeleted());
|
||||||
{
|
}
|
||||||
// Don't use numFiles > 59! as we're using this for the minute element in the cache file path.
|
|
||||||
final int numFiles = 30;
|
@Test
|
||||||
File[] files = new File[numFiles];
|
public void standardCleanAfterAggressiveFinished() throws InterruptedException
|
||||||
|
{
|
||||||
|
// Don't use numFiles > 59! as we're using this for the minute element in the cache file path.
|
||||||
for (int i = 0; i < numFiles; i++)
|
final int numFiles = 30;
|
||||||
{
|
File[] files = new File[numFiles];
|
||||||
Calendar calendar = new GregorianCalendar(2010, 11, 2, 17, i);
|
|
||||||
|
for (int i = 0; i < numFiles; i++)
|
||||||
if (i >= 21 && i <= 24)
|
{
|
||||||
{
|
Calendar calendar = new GregorianCalendar(2010, 11, 2, 17, i);
|
||||||
// 21 to 24 will be deleted after the aggressive deletions (once the cleaner has returned
|
|
||||||
// to normal cleaning), because they are not in the cache.
|
if (i >= 21 && i <= 24)
|
||||||
files[i] = createCacheFile(calendar, UrlSource.NOT_PRESENT, false);
|
{
|
||||||
}
|
// 21 to 24 will be deleted after the aggressive deletions (once the cleaner has returned
|
||||||
else
|
// to normal cleaning), because they are not in the cache.
|
||||||
{
|
files[i] = createCacheFile(calendar, UrlSource.NOT_PRESENT, false);
|
||||||
// All other files will be in the cache
|
}
|
||||||
files[i] = createCacheFile(calendar, UrlSource.REVERSE_CACHE_LOOKUP, true);
|
else
|
||||||
}
|
{
|
||||||
}
|
// All other files will be in the cache
|
||||||
|
files[i] = createCacheFile(calendar, UrlSource.REVERSE_CACHE_LOOKUP, true);
|
||||||
// How much space to reclaim - seven files worth (all files are same size)
|
}
|
||||||
long fileSize = files[0].length();
|
}
|
||||||
long sevenFilesSize = 7 * fileSize;
|
|
||||||
|
// How much space to reclaim - seven files worth (all files are same size)
|
||||||
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
|
long fileSize = files[0].length();
|
||||||
// It will delete the older files aggressively (i.e. even if they are actively in the cache) and
|
long sevenFilesSize = 7 * fileSize;
|
||||||
// then will examine the new files for potential deletion.
|
|
||||||
// Since some of the newer files are not in the cache, it will delete those too.
|
// We'll get it to clean seven files worth aggressively and then it will continue non-aggressively.
|
||||||
cleaner.executeAggressive("standardCleanAfterAggressiveFinished()", sevenFilesSize);
|
// It will delete the older files aggressively (i.e. even if they are actively in the cache) and
|
||||||
|
// then will examine the new files for potential deletion.
|
||||||
Thread.sleep(400);
|
// Since some of the newer files are not in the cache, it will delete those too.
|
||||||
while (cleaner.isRunning())
|
cleaner.executeAggressive("standardCleanAfterAggressiveFinished()", sevenFilesSize);
|
||||||
{
|
|
||||||
Thread.sleep(200);
|
Thread.sleep(400);
|
||||||
}
|
while (cleaner.isRunning())
|
||||||
|
{
|
||||||
for (int i = 0; i < numFiles; i++)
|
Thread.sleep(200);
|
||||||
{
|
}
|
||||||
if (i < 7)
|
|
||||||
{
|
for (int i = 0; i < numFiles; i++)
|
||||||
assertFalse("First 7 files should have been aggressively cleaned", files[i].exists());
|
{
|
||||||
}
|
if (i < 7)
|
||||||
|
{
|
||||||
if (i >= 21 && i <= 24)
|
assertFalse("First 7 files should have been aggressively cleaned", files[i].exists());
|
||||||
{
|
}
|
||||||
assertFalse("Files with indexes 21-24 should have been deleted", files[i].exists());
|
|
||||||
}
|
if (i >= 21 && i <= 24)
|
||||||
}
|
{
|
||||||
assertEquals("Incorrect number of deleted files", 11, cleaner.getNumFilesDeleted());
|
assertFalse("Files with indexes 21-24 should have been deleted", files[i].exists());
|
||||||
assertEquals("Incorrect total size of files deleted", (11*fileSize), cleaner.getSizeFilesDeleted());
|
}
|
||||||
}
|
}
|
||||||
|
assertEquals("Incorrect number of deleted files", 11, cleaner.getNumFilesDeleted());
|
||||||
@Test
|
assertEquals("Incorrect total size of files deleted", (11 * fileSize), cleaner.getSizeFilesDeleted());
|
||||||
public void emptyParentDirectoriesAreDeleted() throws FileNotFoundException
|
}
|
||||||
{
|
|
||||||
cleaner.setMaxDeleteWatchCount(0);
|
@Test
|
||||||
File file = new File(cacheRoot, "243235984/a/b/c/d.bin");
|
public void emptyParentDirectoriesAreDeleted() throws FileNotFoundException
|
||||||
file.getParentFile().mkdirs();
|
{
|
||||||
PrintWriter writer = new PrintWriter(file);
|
cleaner.setMaxDeleteWatchCount(0);
|
||||||
writer.println("Content for emptyParentDirectoriesAreDeleted");
|
File file = new File(cacheRoot, "243235984/a/b/c/d.bin");
|
||||||
writer.close();
|
file.getParentFile().mkdirs();
|
||||||
assertTrue("Directory should exist", new File(cacheRoot, "243235984/a/b/c").exists());
|
PrintWriter writer = new PrintWriter(file);
|
||||||
|
writer.println("Content for emptyParentDirectoriesAreDeleted");
|
||||||
cleaner.handle(file);
|
writer.close();
|
||||||
|
assertTrue("Directory should exist", new File(cacheRoot, "243235984/a/b/c").exists());
|
||||||
assertFalse("Directory should have been deleted", new File(cacheRoot, "243235984").exists());
|
|
||||||
}
|
cleaner.handle(file);
|
||||||
|
|
||||||
@Test
|
assertFalse("Directory should have been deleted", new File(cacheRoot, "243235984").exists());
|
||||||
public void markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner()
|
}
|
||||||
{
|
|
||||||
// A non-advisable setting but useful for testing, maxDeleteWatchCount of zero
|
@Test
|
||||||
// which should result in immediate deletion upon discovery of content no longer in the cache.
|
public void markedFilesHaveDeletionDeferredUntilCorrectPassOfCleaner()
|
||||||
cleaner.setMaxDeleteWatchCount(0);
|
{
|
||||||
File file = createCacheFile(UrlSource.NOT_PRESENT, false);
|
// A non-advisable setting but useful for testing, maxDeleteWatchCount of zero
|
||||||
|
// which should result in immediate deletion upon discovery of content no longer in the cache.
|
||||||
cleaner.handle(file);
|
cleaner.setMaxDeleteWatchCount(0);
|
||||||
checkFilesDeleted(file);
|
File file = createCacheFile(UrlSource.NOT_PRESENT, false);
|
||||||
|
|
||||||
// Anticipated to be the most common setting: maxDeleteWatchCount of 1.
|
cleaner.handle(file);
|
||||||
cleaner.setMaxDeleteWatchCount(1);
|
checkFilesDeleted(file);
|
||||||
file = createCacheFile(UrlSource.NOT_PRESENT, false);
|
|
||||||
|
// Anticipated to be the most common setting: maxDeleteWatchCount of 1.
|
||||||
cleaner.handle(file);
|
cleaner.setMaxDeleteWatchCount(1);
|
||||||
checkWatchCountForCacheFile(file, 1);
|
file = createCacheFile(UrlSource.NOT_PRESENT, false);
|
||||||
|
|
||||||
cleaner.handle(file);
|
cleaner.handle(file);
|
||||||
checkFilesDeleted(file);
|
checkWatchCountForCacheFile(file, 1);
|
||||||
|
|
||||||
// Check that some other arbitrary figure for maxDeleteWatchCount works correctly.
|
cleaner.handle(file);
|
||||||
cleaner.setMaxDeleteWatchCount(3);
|
checkFilesDeleted(file);
|
||||||
file = createCacheFile(UrlSource.NOT_PRESENT, false);
|
|
||||||
|
// Check that some other arbitrary figure for maxDeleteWatchCount works correctly.
|
||||||
cleaner.handle(file);
|
cleaner.setMaxDeleteWatchCount(3);
|
||||||
checkWatchCountForCacheFile(file, 1);
|
file = createCacheFile(UrlSource.NOT_PRESENT, false);
|
||||||
|
|
||||||
cleaner.handle(file);
|
cleaner.handle(file);
|
||||||
checkWatchCountForCacheFile(file, 2);
|
checkWatchCountForCacheFile(file, 1);
|
||||||
|
|
||||||
cleaner.handle(file);
|
cleaner.handle(file);
|
||||||
checkWatchCountForCacheFile(file, 3);
|
checkWatchCountForCacheFile(file, 2);
|
||||||
|
|
||||||
cleaner.handle(file);
|
cleaner.handle(file);
|
||||||
checkFilesDeleted(file);
|
checkWatchCountForCacheFile(file, 3);
|
||||||
}
|
|
||||||
|
cleaner.handle(file);
|
||||||
|
checkFilesDeleted(file);
|
||||||
private void checkFilesDeleted(File file)
|
}
|
||||||
{
|
|
||||||
assertFalse("File should have been deleted: " + file, file.exists());
|
private void checkFilesDeleted(File file)
|
||||||
CacheFileProps props = new CacheFileProps(file);
|
{
|
||||||
assertFalse("Properties file should have been deleted, cache file: " + file, props.exists());
|
assertFalse("File should have been deleted: " + file, file.exists());
|
||||||
}
|
CacheFileProps props = new CacheFileProps(file);
|
||||||
|
assertFalse("Properties file should have been deleted, cache file: " + file, props.exists());
|
||||||
|
}
|
||||||
private void checkWatchCountForCacheFile(File file, Integer expectedWatchCount)
|
|
||||||
{
|
private void checkWatchCountForCacheFile(File file, Integer expectedWatchCount)
|
||||||
assertTrue("File should still exist: " + file, file.exists());
|
{
|
||||||
CacheFileProps props = new CacheFileProps(file);
|
assertTrue("File should still exist: " + file, file.exists());
|
||||||
props.load();
|
CacheFileProps props = new CacheFileProps(file);
|
||||||
assertEquals("File should contain correct deleteWatchCount", expectedWatchCount, props.getDeleteWatchCount());
|
props.load();
|
||||||
}
|
assertEquals("File should contain correct deleteWatchCount", expectedWatchCount, props.getDeleteWatchCount());
|
||||||
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void filesInCacheAreNotDeleted() throws InterruptedException
|
public void filesInCacheAreNotDeleted() throws InterruptedException
|
||||||
{
|
{
|
||||||
cleaner.setMaxDeleteWatchCount(0);
|
cleaner.setMaxDeleteWatchCount(0);
|
||||||
|
|
||||||
// The SlowContentStore will always give out content when asked,
|
// The SlowContentStore will always give out content when asked,
|
||||||
// so asking for any content will cause something to be cached.
|
// so asking for any content will cause something to be cached.
|
||||||
String url = makeContentUrl();
|
String url = makeContentUrl();
|
||||||
int numFiles = 50;
|
int numFiles = 50;
|
||||||
for (int i = 0; i < numFiles; i++)
|
for (int i = 0; i < numFiles; i++)
|
||||||
{
|
{
|
||||||
ContentReader reader = cachingStore.getReader(url);
|
ContentReader reader = cachingStore.getReader(url);
|
||||||
reader.getContentString();
|
reader.getContentString();
|
||||||
}
|
}
|
||||||
|
|
||||||
cleaner.execute();
|
cleaner.execute();
|
||||||
|
|
||||||
Thread.sleep(400);
|
Thread.sleep(400);
|
||||||
while (cleaner.isRunning())
|
while (cleaner.isRunning())
|
||||||
{
|
{
|
||||||
Thread.sleep(200);
|
Thread.sleep(200);
|
||||||
}
|
}
|
||||||
|
|
||||||
for (int i = 0; i < numFiles; i++)
|
for (int i = 0; i < numFiles; i++)
|
||||||
{
|
{
|
||||||
File cacheFile = new File(cache.getCacheFilePath(url));
|
File cacheFile = new File(cache.getCacheFilePath(url));
|
||||||
assertTrue("File should exist", cacheFile.exists());
|
assertTrue("File should exist", cacheFile.exists());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private File createCacheFile(UrlSource urlSource, boolean putInCache)
|
private File createCacheFile(UrlSource urlSource, boolean putInCache)
|
||||||
{
|
{
|
||||||
Calendar calendar = new GregorianCalendar();
|
Calendar calendar = new GregorianCalendar();
|
||||||
return createCacheFile(calendar, urlSource, putInCache);
|
return createCacheFile(calendar, urlSource, putInCache);
|
||||||
}
|
}
|
||||||
|
|
||||||
private File createCacheFile(Calendar calendar, /*int year, int month, int day, int hour, int minute,*/
|
private File createCacheFile(Calendar calendar, /* int year, int month, int day, int hour, int minute, */
|
||||||
UrlSource urlSource, boolean putInCache)
|
UrlSource urlSource, boolean putInCache)
|
||||||
{
|
{
|
||||||
File file = new File(cacheRoot, createNewCacheFilePath(calendar));
|
File file = new File(cacheRoot, createNewCacheFilePath(calendar));
|
||||||
file.getParentFile().mkdirs();
|
file.getParentFile().mkdirs();
|
||||||
writeSampleContent(file);
|
writeSampleContent(file);
|
||||||
String contentUrl = makeContentUrl();
|
String contentUrl = makeContentUrl();
|
||||||
|
|
||||||
if (putInCache)
|
if (putInCache)
|
||||||
{
|
{
|
||||||
cache.putIntoLookup(Key.forUrl(contentUrl), file.getAbsolutePath());
|
cache.putIntoLookup(Key.forUrl(contentUrl), file.getAbsolutePath());
|
||||||
}
|
}
|
||||||
|
|
||||||
switch(urlSource)
|
switch (urlSource)
|
||||||
{
|
{
|
||||||
case NOT_PRESENT:
|
case NOT_PRESENT:
|
||||||
// cache won't be able to determine original content URL for the file
|
// cache won't be able to determine original content URL for the file
|
||||||
break;
|
break;
|
||||||
case PROPS_FILE:
|
case PROPS_FILE:
|
||||||
// file with content URL in properties file
|
// file with content URL in properties file
|
||||||
CacheFileProps props = new CacheFileProps(file);
|
CacheFileProps props = new CacheFileProps(file);
|
||||||
props.setContentUrl(contentUrl);
|
props.setContentUrl(contentUrl);
|
||||||
props.store();
|
props.store();
|
||||||
break;
|
break;
|
||||||
case REVERSE_CACHE_LOOKUP:
|
case REVERSE_CACHE_LOOKUP:
|
||||||
// file with content URL in reverse lookup cache - but not 'in the cache' (forward lookup).
|
// file with content URL in reverse lookup cache - but not 'in the cache' (forward lookup).
|
||||||
cache.putIntoLookup(Key.forCacheFile(file), contentUrl);
|
cache.putIntoLookup(Key.forCacheFile(file), contentUrl);
|
||||||
}
|
}
|
||||||
assertTrue("File should exist", file.exists());
|
assertTrue("File should exist", file.exists());
|
||||||
return file;
|
return file;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
/**
|
* Mimick functionality of ContentCacheImpl.createNewCacheFilePath() but allowing a specific date (rather than 'now') to be used.
|
||||||
* Mimick functionality of ContentCacheImpl.createNewCacheFilePath()
|
*
|
||||||
* but allowing a specific date (rather than 'now') to be used.
|
* @param calendar
|
||||||
*
|
* Calendar
|
||||||
* @param calendar Calendar
|
* @return Path to use for cache file.
|
||||||
* @return Path to use for cache file.
|
*/
|
||||||
*/
|
private String createNewCacheFilePath(Calendar calendar)
|
||||||
private String createNewCacheFilePath(Calendar calendar)
|
{
|
||||||
{
|
int year = calendar.get(Calendar.YEAR);
|
||||||
int year = calendar.get(Calendar.YEAR);
|
int month = calendar.get(Calendar.MONTH) + 1; // 0-based
|
||||||
int month = calendar.get(Calendar.MONTH) + 1; // 0-based
|
int day = calendar.get(Calendar.DAY_OF_MONTH);
|
||||||
int day = calendar.get(Calendar.DAY_OF_MONTH);
|
int hour = calendar.get(Calendar.HOUR_OF_DAY);
|
||||||
int hour = calendar.get(Calendar.HOUR_OF_DAY);
|
int minute = calendar.get(Calendar.MINUTE);
|
||||||
int minute = calendar.get(Calendar.MINUTE);
|
// create the URL
|
||||||
// create the URL
|
StringBuilder sb = new StringBuilder(20);
|
||||||
StringBuilder sb = new StringBuilder(20);
|
sb.append(year).append('/')
|
||||||
sb.append(year).append('/')
|
.append(month).append('/')
|
||||||
.append(month).append('/')
|
.append(day).append('/')
|
||||||
.append(day).append('/')
|
.append(hour).append('/')
|
||||||
.append(hour).append('/')
|
.append(minute).append('/')
|
||||||
.append(minute).append('/')
|
.append(GUID.generate()).append(".bin");
|
||||||
.append(GUID.generate()).append(".bin");
|
return sb.toString();
|
||||||
return sb.toString();
|
}
|
||||||
}
|
|
||||||
|
private String makeContentUrl()
|
||||||
|
{
|
||||||
private String makeContentUrl()
|
return "protocol://some/made/up/url/" + GUID.generate();
|
||||||
{
|
}
|
||||||
return "protocol://some/made/up/url/" + GUID.generate();
|
|
||||||
}
|
private void writeSampleContent(File file)
|
||||||
|
{
|
||||||
|
try
|
||||||
private void writeSampleContent(File file)
|
{
|
||||||
{
|
PrintWriter writer = new PrintWriter(file);
|
||||||
try
|
writer.println("Content for sample file in " + getClass().getName());
|
||||||
{
|
writer.close();
|
||||||
PrintWriter writer = new PrintWriter(file);
|
}
|
||||||
writer.println("Content for sample file in " + getClass().getName());
|
catch (Throwable e)
|
||||||
writer.close();
|
{
|
||||||
}
|
throw new RuntimeException("Couldn't write file: " + file, e);
|
||||||
catch (Throwable e)
|
}
|
||||||
{
|
}
|
||||||
throw new RuntimeException("Couldn't write file: " + file, e);
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,57 +1,57 @@
|
|||||||
/*
|
/*
|
||||||
* #%L
|
* #%L
|
||||||
* Alfresco Repository
|
* Alfresco Repository
|
||||||
* %%
|
* %%
|
||||||
* Copyright (C) 2005 - 2016 Alfresco Software Limited
|
* Copyright (C) 2005 - 2025 Alfresco Software Limited
|
||||||
* %%
|
* %%
|
||||||
* This file is part of the Alfresco software.
|
* This file is part of the Alfresco software.
|
||||||
* If the software was purchased under a paid Alfresco license, the terms of
|
* If the software was purchased under a paid Alfresco license, the terms of
|
||||||
* the paid license agreement will prevail. Otherwise, the software is
|
* the paid license agreement will prevail. Otherwise, the software is
|
||||||
* provided under the following open source license terms:
|
* provided under the following open source license terms:
|
||||||
*
|
*
|
||||||
* Alfresco is free software: you can redistribute it and/or modify
|
* Alfresco is free software: you can redistribute it and/or modify
|
||||||
* it under the terms of the GNU Lesser General Public License as published by
|
* it under the terms of the GNU Lesser General Public License as published by
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
* the Free Software Foundation, either version 3 of the License, or
|
||||||
* (at your option) any later version.
|
* (at your option) any later version.
|
||||||
*
|
*
|
||||||
* Alfresco is distributed in the hope that it will be useful,
|
* Alfresco is distributed in the hope that it will be useful,
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
* GNU Lesser General Public License for more details.
|
* GNU Lesser General Public License for more details.
|
||||||
*
|
*
|
||||||
* You should have received a copy of the GNU Lesser General Public License
|
* You should have received a copy of the GNU Lesser General Public License
|
||||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||||
* #L%
|
* #L%
|
||||||
*/
|
*/
|
||||||
package org.alfresco.util;
|
package org.alfresco.util;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
import java.time.Duration;
|
||||||
import org.apache.commons.logging.Log;
|
|
||||||
import org.apache.commons.logging.LogFactory;
|
import junit.framework.TestCase;
|
||||||
import org.junit.runner.RunWith;
|
import org.apache.commons.logging.Log;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.springframework.context.ApplicationContext;
|
import org.junit.runner.RunWith;
|
||||||
import org.springframework.test.context.ContextConfiguration;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
import org.springframework.test.context.ContextCustomizerFactories;
|
import org.springframework.context.ApplicationContext;
|
||||||
import org.springframework.test.context.junit4.SpringRunner;
|
import org.springframework.test.context.ContextConfiguration;
|
||||||
|
import org.springframework.test.context.ContextCustomizerFactories;
|
||||||
/**
 * Base test class providing Hibernate sessions.
 * <p>
 * By default this is auto-wired by type. If this is going to result in a conflict then use auto-wire by name. This can be done by setting populateProtectedVariables to true in the constructor and then adding protected members with the same name as the bean you require.
 *
 * @author Derek Hulley
 */
@RunWith(SpringRunner.class)
@ContextConfiguration({"classpath:alfresco/application-context.xml"})
@ContextCustomizerFactories(factories = {}, mergeMode = ContextCustomizerFactories.MergeMode.REPLACE_DEFAULTS)
public abstract class BaseSpringTest extends TestCase
{
    /** Maximum time test code should wait for asynchronous operations to complete. */
    protected static final Duration MAX_ASYNC_TIMEOUT = Duration.ofSeconds(10);

    /** Logger keyed on the concrete subclass name. */
    public Log logger = LogFactory.getLog(getClass().getName());

    /** Spring application context, autowired for use by subclasses. */
    @Autowired
    protected ApplicationContext applicationContext;
}
||||||
|
Reference in New Issue
Block a user