Compare commits


1 Commit

Author: Rajkumarp291989
SHA1: af926a53d5
Message: APPS-1190
Date: 2022-02-21 17:22:41 +05:30
103 changed files with 526 additions and 2095 deletions

View File

@@ -60,7 +60,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext01TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContext02TestSuite"
@@ -75,7 +75,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContext04TestSuite"
@@ -83,7 +83,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContext05TestSuite"
@@ -102,7 +102,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContext06TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - AppContextExtraTestSuite"
@@ -110,7 +110,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl repository -Dtest=AppContextExtraTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - MiscContextTestSuite"
@@ -118,7 +118,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl repository -Dtest=MiscContextTestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Repository - SearchTestSuite"
@@ -241,7 +241,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext02TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Remote-api - AppContext03TestSuite"
@@ -249,7 +249,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext03TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Remote-api - AppContext04TestSuite"
@@ -257,7 +257,7 @@ jobs:
before_script:
- docker run -d -p 5433:5432 -e POSTGRES_PASSWORD=alfresco -e POSTGRES_USER=alfresco -e POSTGRES_DB=alfresco postgres:13.3 postgres -c 'max_connections=300'
- docker run -d -p 61616:61616 -p 5672:5672 alfresco/alfresco-activemq:5.16.1
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7
- docker run -d -p 8090:8090 -e JAVA_OPTS=" -Xms256m -Xmx256m" alfresco/alfresco-transform-core-aio:2.5.7-A1
script: travis_wait 20 mvn -B test -pl remote-api -Dtest=AppContext04TestSuite -Ddb.driver=org.postgresql.Driver -Ddb.name=alfresco -Ddb.url=jdbc:postgresql://localhost:5433/alfresco -Ddb.username=alfresco -Ddb.password=alfresco
- name: "Remote-api - AppContextExtraTestSuite"

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-automation-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<build>

View File

@@ -73,5 +73,4 @@ public class RecordFoldersAPI extends BaseAPI
}
return null;
}
}

View File

@@ -114,11 +114,11 @@ public class RecordsAPI extends BaseAPI
/**
* Reject the record given as parameter
*
* @param user the user declaring the document as record
* @param password the user's password
* @param user the user declaring the document as record
* @param password the user's password
* @param expectedStatusCode The expected return status code.
* @param recordName the record name
* @param reason reject reason
* @param recordName the record name
* @param reason reject reason
* @return The HTTP Response.
* @throws AssertionError If the expectedStatusCode was not returned.
*/
@@ -129,8 +129,8 @@ public class RecordsAPI extends BaseAPI
JSONObject requestParams = new JSONObject();
requestParams.put("name", "reject");
requestParams.put("nodeRef", recNodeRef);
requestParams.put("params", new JSONObject()
.put("reason", reason));
requestParams.put("params",new JSONObject()
.put("reason",reason));
return doPostJsonRequest(user, password, expectedStatusCode, requestParams, RM_ACTIONS_API);
}
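
For reference, a minimal standalone sketch (not part of this commit) of the payload this reject method posts to RM_ACTIONS_API, assembled with the same org.json calls shown above; the nodeRef value is a hypothetical placeholder:

    import org.json.JSONObject;

    // Standalone sketch: the reject action body built as in the method above.
    // The nodeRef value is a hypothetical placeholder.
    public class RejectPayloadSketch
    {
        public static void main(String[] args)
        {
            String recNodeRef = "workspace://SpacesStore/a-hypothetical-node-id";
            JSONObject requestParams = new JSONObject();
            requestParams.put("name", "reject");
            requestParams.put("nodeRef", recNodeRef);
            requestParams.put("params", new JSONObject().put("reason", "filed by mistake"));
            // Prints a JSON object containing the name, nodeRef and nested params.reason fields.
            System.out.println(requestParams);
        }
    }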
@@ -221,7 +221,6 @@ public class RecordsAPI extends BaseAPI
* Uploads an electronic record
* <p>
* eg. of usage for creating record directly in Unfiled Records : uploadElectronicRecord(getAdminName(), getAdminPassword(), recordPropertiesStringMap, UNFILED_RECORDS_BREADCRUMB, DocumentType.HTML)
*
* @param username the username
* @param password the password
* @param properties a map of record properties and their values
@@ -242,7 +241,6 @@ public class RecordsAPI extends BaseAPI
* <li>eg. of usage in the case in which the record is inside a folder in Unfiled Records : deleteRecord(getAdminName(), getAdminPassword(), "f1 (2016-1472716888713)", UNFILED_RECORDS_BREADCRUMB, "unfiled records folder");
* <li>eg. of usage in the case in which the record is created directly in Unfiled Records : deleteRecord(getAdminName(), getAdminPassword(), "f1 (2016-1472716888713)", UNFILED_RECORDS_BREADCRUMB, "");
* </ul>
*
* @param username user's username
* @param password its password
* @param recordName the record name
@@ -269,7 +267,7 @@ public class RecordsAPI extends BaseAPI
* @param recordName the String with which the record name starts
* @return the record object in case it exists, null otherwise
*/
public CmisObject getRecord(String username, String password, String folderName, String recordName)
private CmisObject getRecord(String username, String password, String folderName, String recordName)
{
for (CmisObject record : contentService.getFolderObject(contentService.getCMISSession(username, password), RM_SITE_ID, folderName).getChildren())
{
@@ -330,9 +328,9 @@ public class RecordsAPI extends BaseAPI
/**
* Hide in place record
*
* @param user the user
* @param password the user's password
* @param nodeId the in place record node id
* @param user the user
* @param password the user's password
* @param nodeId the in place record node id
* @return The HTTP Response.
*/
public HttpResponse hideRecord(String user, String password, String nodeId)
@@ -346,7 +344,6 @@ public class RecordsAPI extends BaseAPI
return doPostJsonRequest(user, password, SC_OK, requestParams, ACTIONS_API);
}
/**
* Retrieves the record's nodeRef
*

View File

@@ -1,139 +0,0 @@
/*
* #%L
* Alfresco Records Management Module
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* -
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* -
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* -
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* -
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.rm.community.records;
import static org.alfresco.rest.rm.community.util.CommonTestUtils.generateTestPrefix;
import static org.alfresco.utility.data.RandomData.getRandomName;
import static org.alfresco.utility.report.log.Step.STEP;
import static org.springframework.http.HttpStatus.CREATED;
import static org.testng.Assert.assertNotNull;
import java.util.HashMap;
import java.util.Map;
import org.alfresco.rest.core.v0.BaseAPI.RMProperty;
import org.alfresco.rest.rm.community.base.BaseRMRestTest;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategory;
import org.alfresco.rest.rm.community.model.recordcategory.RecordCategoryChild;
import org.alfresco.rest.v0.RMRolesAndActionsAPI;
import org.alfresco.rest.v0.RecordsAPI;
import org.alfresco.test.AlfrescoTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Contains CreateNonElectronicRecords test which checks creation and basic actions(view details, edit, move, copy, delete) on non-electronic records
* <p/>
* Precondition:
* <p/>
* RM site created, contains category 1 with folder 1 and folder 2 inside it
* <p/>
* RM user has RM admin role
*
* @author Shubham Jain
* @Since 7.2.0 M2
*/
public class CreateNonElectronicRecordsTests extends BaseRMRestTest
{
private RecordCategory rootCategory;
private RecordCategoryChild recordFolder;
/**
* data prep services
*/
@Autowired
private RMRolesAndActionsAPI rmRolesAndActionsAPI;
@Autowired
private RecordsAPI recordsAPI;
private final String TEST_PREFIX = generateTestPrefix(CreateNonElectronicRecordsTests.class);
private final String RM_ADMIN = TEST_PREFIX + "rm_admin";
private final String recordName = "RM-2777 record";
private final String recordTitle = recordName + " title";
private final String recordDescription = recordName + " description";
@BeforeClass (alwaysRun = true)
public void preConditions()
{
STEP("Create RM Site");
createRMSiteIfNotExists();
STEP("Create RM Admin user");
rmRolesAndActionsAPI.createUserAndAssignToRole(getAdminUser().getUsername(), getAdminUser().getPassword(), RM_ADMIN,
getAdminUser().getPassword(),
"Administrator");
STEP("Create root level category");
rootCategory = createRootCategory(getRandomName("Category"));
STEP("Create the record folder inside the rootCategory");
recordFolder = createRecordFolder(rootCategory.getId(), getRandomName("Folder"));
}
/**
* Test v0 methods to create and get non-electronic records.
*/
@Test
@AlfrescoTest (jira = "RM-2777")
public void createNonElectronicRecordTest()
{
STEP("Create a non-electronic record by completing some of the fields");
Map<Enum<?>, String> properties = new HashMap<Enum<?>, String>();
properties.put(RMProperty.TITLE, recordTitle);
properties.put(RMProperty.DESCRIPTION, recordDescription);
properties.put(RMProperty.NAME, recordName);
properties.put(RMProperty.PHYSICAL_SIZE, "");
properties.put(RMProperty.NUMBER_OF_COPIES, "");
properties.put(RMProperty.SHELF, "");
properties.put(RMProperty.STORAGE_LOCATION, "");
properties.put(RMProperty.BOX, "");
properties.put(RMProperty.FILE, "");
recordsAPI.createNonElectronicRecord(getAdminUser().getUsername(),
getAdminUser().getPassword(), properties, rootCategory.getName(), recordFolder.getName());
STEP("Check the non-electronic record has been created");
assertStatusCode(CREATED);
assertNotNull(recordsAPI.getRecord(getAdminUser().getUsername(), getAdminUser().getPassword(),
recordFolder.getName(), recordName));
}
@AfterClass (alwaysRun = true)
public void deletePreConditions()
{
STEP("Delete the created rootCategory along with corresponding record folders/records present in it");
getRestAPIFactory().getRecordCategoryAPI().deleteRecordCategory(rootCategory.getId());
}
}

View File

@@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
<suite name="TestNG AllTestSuite" configfailurepolicy="continue" verbose="1" time-out="300000">
<suite name="TestNG AllTestSuite" configfailurepolicy="continue" verbose="1" time-out="300000" parallel="false">
<test name="restapi">
<packages>
<package name="org.alfresco.rest.rm.community.*"/>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-parent</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,4 +1,4 @@
TRANSFORMERS_TAG=2.5.7
SOLR6_TAG=2.0.3
TRANSFORMERS_TAG=2.5.7-A1
SOLR6_TAG=2.0.2
POSTGRES_TAG=13.3
ACTIVEMQ_TAG=5.16.1

View File

@@ -26,8 +26,7 @@ services:
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=search
-Dsolr.port=8983
-Dsolr.secureComms=secret
-Dsolr.sharedSecret=secret
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
@@ -62,19 +61,15 @@ services:
image: alfresco/alfresco-search-services:${SOLR6_TAG}
environment:
#Solr needs to know how to register itself with Alfresco
SOLR_ALFRESCO_HOST: "alfresco"
SOLR_ALFRESCO_PORT: "8080"
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
#Alfresco needs to know how to call solr
SOLR_SOLR_HOST: "search"
SOLR_SOLR_PORT: "8983"
- SOLR_SOLR_HOST=search
- SOLR_SOLR_PORT=8983
#Create the default alfresco and archive cores
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
#HTTP by default
ALFRESCO_SECURE_COMMS: "secret"
JAVA_TOOL_OPTIONS:
"
-Dalfresco.secureComms.secret=secret
"
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
#HTTP by default
- ALFRESCO_SECURE_COMMS=none
ports:
- 8083:8983 #Browser port

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -66,8 +66,7 @@ ftp.enabled=false
# Solr config
index.subsystem.name=solr6
solr.secureComms=secret
solr.sharedSecret=secret
solr.secureComms=none
solr.port=8983
#By default the basic auth is on false - REPO-2575

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-governance-services-community-repo-parent</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<build>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-amps</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<dependencies>
@@ -56,7 +56,7 @@
<dependency>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
<version>1.2.18.3</version>
<version>1.2.19</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
@@ -128,8 +128,8 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@@ -21,7 +21,7 @@ package org.alfresco.util.transaction;
import org.alfresco.error.AlfrescoRuntimeException;
/**
* Exception wraps {@link java.util.NoSuchElementException} from {@link org.apache.commons.dbcp2.BasicDataSource}
* Exception wraps {@link java.util.NoSuchElementException} from {@link org.apache.commons.dbcp.BasicDataSource}
*
* @author alex.mukha
* @since 4.1.9

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -26,6 +26,7 @@
package org.alfresco.repo.content;
import org.alfresco.service.Experimental;
/**
* Enumeration with values for archive-restore parameter keys.
@@ -34,6 +35,7 @@ package org.alfresco.repo.content;
*
* @author mpichura
*/
@Experimental
public enum ContentRestoreParams
{
/**

View File

@@ -26,6 +26,7 @@
package org.alfresco.repo.content;
import org.alfresco.api.AlfrescoPublicApi;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.ContentAccessor;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
@@ -339,6 +340,7 @@ public interface ContentStore
* @param contentUrl the URL of the content for which the storage properties are to be retrieved.
* @return Returns a key-value (String-String) collection of storage headers/properties with their respective values.
*/
@Experimental
default Map<String, String> getStorageProperties(String contentUrl)
{
return Collections.emptyMap();
@@ -348,12 +350,14 @@ public interface ContentStore
* Submit a request to send content to archive (offline) state.
* If no connector is present or connector is not supporting sending to archive, then {@link UnsupportedOperationException} will be returned.
* Specific connector will decide which storage class/tier will be set for content.
* This method is experimental and subject to changes.
*
* @param contentUrl the URL of the content which is to be archived.
* @param archiveParams a map of String-Serializable parameters defining Storage Provider specific request parameters (can be empty).
* @return true when request successful, false when unsuccessful.
* @throws UnsupportedOperationException when store is unable to handle request.
*/
@Experimental
default boolean requestSendContentToArchive(String contentUrl, Map<String, Serializable> archiveParams)
{
throw new UnsupportedOperationException("Request to archive content is not supported by this content store.");
@@ -366,12 +370,14 @@ public interface ContentStore
* Keys of this map should be restricted to {@code ContentRestoreParams} enumeration.
* For AWS S3 map can indicating expiry days, Glacier restore tier.
* For Azure Blob map can indicate rehydrate priority.
* This method is experimental and subject to changes.
*
* @param contentUrl the URL of the content which is to be archived.
* @param restoreParams a map of String-Serializable parameters defining Storage Provider specific request parameters (can be empty).
* @return true when request successful, false when unsuccessful.
* @throws UnsupportedOperationException when store is unable to handle request.
*/
@Experimental
default boolean requestRestoreContentFromArchive(String contentUrl, Map<String, Serializable> restoreParams)
{
throw new UnsupportedOperationException("Request to restore content from archive is not supported by this content store.");

View File

@@ -26,6 +26,8 @@
package org.alfresco.repo.content;
import org.alfresco.service.Experimental;
/**
* Enumeration with "header" values for Alfresco derived Storage Properties
* Values of this enum should be used when adding Alfresco derived key-value pairs in Storage Properties map.
@@ -33,6 +35,7 @@ package org.alfresco.repo.content;
*
* @author mpichura
*/
@Experimental
public enum ObjectStorageProps {
/**
* Object's content is archived and not immediately accessible.

View File

@@ -26,6 +26,7 @@
package org.alfresco.service.cmr.repository;
import org.alfresco.api.AlfrescoPublicApi;
import org.alfresco.service.Experimental;
/**
* Unable to access as content is in an Archived state.
@@ -33,6 +34,7 @@ import org.alfresco.api.AlfrescoPublicApi;
*
* @author David Edwards
*/
@Experimental
@AlfrescoPublicApi
public class ArchivedIOException extends ContentIOException
{

View File

@@ -27,6 +27,7 @@ package org.alfresco.service.cmr.repository;
import org.alfresco.api.AlfrescoPublicApi;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.service.Experimental;
/**
* Content Restoration in progress
@@ -34,6 +35,7 @@ import org.alfresco.error.AlfrescoRuntimeException;
* @author David Edwards
*/
@AlfrescoPublicApi
@Experimental
public class RestoreInProgressException extends AlfrescoRuntimeException
{
private static final long serialVersionUID = 5483215922384016269L;

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -9,6 +9,6 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
</project>

View File

@@ -1,6 +1,6 @@
# Fetch image based on Tomcat 9.0, Java 11 and Centos 7
# Fetch image based on Tomcat 9.0, Java 11 and Centos 8
# More infos about this image: https://github.com/Alfresco/alfresco-docker-base-tomcat
FROM alfresco/alfresco-base-tomcat:tomcat9-jre11-centos7-202203091924
FROM alfresco/alfresco-base-tomcat:9.0.54-java-11-centos-7
# Set default docker_context.
ARG resource_path=target

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<properties>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -1,4 +1,4 @@
TRANSFORMERS_TAG=2.5.7
SOLR6_TAG=2.0.3
TRANSFORMERS_TAG=2.5.7-A1
SOLR6_TAG=2.0.2
POSTGRES_TAG=13.3
ACTIVEMQ_TAG=5.16.1

View File

@@ -28,8 +28,7 @@ services:
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=solr6
-Dsolr.port=8983
-Dsolr.secureComms=secret
-Dsolr.sharedSecret=secret
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
@@ -64,19 +63,15 @@ services:
image: alfresco/alfresco-search-services:${SOLR6_TAG}
environment:
#Solr needs to know how to register itself with Alfresco
SOLR_ALFRESCO_HOST: "alfresco"
SOLR_ALFRESCO_PORT: "8080"
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
#Alfresco needs to know how to call solr
SOLR_SOLR_HOST: "solr6"
SOLR_SOLR_PORT: "8983"
- SOLR_SOLR_HOST=solr6
- SOLR_SOLR_PORT=8983
#Create the default alfresco and archive cores
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
#HTTP by default
ALFRESCO_SECURE_COMMS: "secret"
JAVA_TOOL_OPTIONS:
"
-Dalfresco.secureComms.secret=secret
"
- ALFRESCO_SECURE_COMMS=none
ports:
- 8083:8983 #Browser port

View File

@@ -28,8 +28,7 @@ services:
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=solr6
-Dsolr.port=8983
-Dsolr.secureComms=secret
-Dsolr.sharedSecret=secret
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
@@ -67,19 +66,15 @@ services:
image: alfresco/alfresco-search-services:${SOLR6_TAG}
environment:
#Solr needs to know how to register itself with Alfresco
SOLR_ALFRESCO_HOST: "alfresco"
SOLR_ALFRESCO_PORT: "8080"
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
#Alfresco needs to know how to call solr
SOLR_SOLR_HOST: "solr6"
SOLR_SOLR_PORT: "8983"
- SOLR_SOLR_HOST=solr6
- SOLR_SOLR_PORT=8983
#Create the default alfresco and archive cores
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
#HTTP by default
ALFRESCO_SECURE_COMMS: "secret"
JAVA_TOOL_OPTIONS:
"
-Dalfresco.secureComms.secret=secret
"
- ALFRESCO_SECURE_COMMS=none
ports:
- 8083:8983 #Browser port

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<modules>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-tests</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<developers>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo-packaging</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<properties>

pom.xml
View File

@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Alfresco Community Repo Parent</name>
@@ -47,11 +47,11 @@
<dependency.alfresco-trashcan-cleaner.version>2.4.1</dependency.alfresco-trashcan-cleaner.version>
<dependency.alfresco-jlan.version>7.1</dependency.alfresco-jlan.version>
<dependency.alfresco-server-root.version>6.0.1</dependency.alfresco-server-root.version>
<dependency.alfresco-messaging-repo.version>1.2.19</dependency.alfresco-messaging-repo.version>
<dependency.alfresco-messaging-repo.version>1.2.15</dependency.alfresco-messaging-repo.version>
<dependency.alfresco-log-sanitizer.version>0.2</dependency.alfresco-log-sanitizer.version>
<dependency.activiti-engine.version>5.23.0</dependency.activiti-engine.version>
<dependency.activiti.version>5.23.0</dependency.activiti.version>
<dependency.alfresco-transform-model.version>1.4.15</dependency.alfresco-transform-model.version>
<dependency.alfresco-transform-model.version>1.4.9</dependency.alfresco-transform-model.version>
<dependency.alfresco-greenmail.version>6.2</dependency.alfresco-greenmail.version>
<dependency.acs-event-model.version>0.0.13</dependency.acs-event-model.version>
@@ -65,7 +65,7 @@
<dependency.bouncycastle.version>1.70</dependency.bouncycastle.version>
<dependency.mockito-core.version>3.11.2</dependency.mockito-core.version>
<dependency.org-json.version>20211205</dependency.org-json.version>
<dependency.commons-dbcp.version>2.9.0</dependency.commons-dbcp.version>
<dependency.commons-dbcp.version>1.4-DBCP330</dependency.commons-dbcp.version>
<dependency.commons-io.version>2.11.0</dependency.commons-io.version>
<dependency.gson.version>2.8.5</dependency.gson.version>
<dependency.httpclient.version>4.5.13</dependency.httpclient.version>
@@ -73,16 +73,16 @@
<dependency.commons-httpclient.version>3.1-HTTPCLIENT-1265</dependency.commons-httpclient.version>
<dependency.xercesImpl.version>2.12.2</dependency.xercesImpl.version>
<dependency.slf4j.version>1.7.35</dependency.slf4j.version>
<dependency.gytheio.version>0.16</dependency.gytheio.version>
<dependency.gytheio.version>0.13</dependency.gytheio.version>
<dependency.groovy.version>3.0.9</dependency.groovy.version>
<dependency.tika.version>2.2.1</dependency.tika.version>
<dependency.tika.version>2.2.1</dependency.tika.version>
<dependency.spring-security.version>5.6.1</dependency.spring-security.version>
<dependency.truezip.version>7.7.10</dependency.truezip.version>
<dependency.poi.version>4.1.2</dependency.poi.version>
<dependency.ooxml-schemas.version>1.4</dependency.ooxml-schemas.version>
<dependency.keycloak.version>15.0.2</dependency.keycloak.version>
<dependency.jboss.logging.version>3.4.3.Final</dependency.jboss.logging.version>
<dependency.camel.version>3.15.0</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies (can cause dependency conflicts)-->
<dependency.camel.version>3.7.7</dependency.camel.version> <!-- when bumping this version, please keep track/sync with included netty.io dependencies (can cause dependency conflicts)-->
<dependency.activemq.version>5.16.1</dependency.activemq.version>
<dependency.apache-compress.version>1.21</dependency.apache-compress.version>
<dependency.apache.taglibs.version>1.2.5</dependency.apache.taglibs.version>
@@ -106,11 +106,11 @@
<dependency.jakarta-rpc-api.version>1.1.4</dependency.jakarta-rpc-api.version>
<alfresco.googledrive.version>3.2.1.3</alfresco.googledrive.version>
<alfresco.aos-module.version>1.4.1</alfresco.aos-module.version>
<alfresco.api-explorer.version>7.2.0</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.aos-module.version>1.4.0.1</alfresco.aos-module.version>
<alfresco.api-explorer.version>7.2.0-A3</alfresco.api-explorer.version> <!-- Also in alfresco-enterprise-share -->
<alfresco.maven-plugin.version>2.2.0</alfresco.maven-plugin.version>
<license-maven-plugin.version>2.0.1.alfresco-2</license-maven-plugin.version>
<license-maven-plugin.version>2.0.1.alfresco-1</license-maven-plugin.version>
<dependency.postgresql.version>42.3.2</dependency.postgresql.version>
<dependency.mysql.version>8.0.27</dependency.mysql.version>
@@ -118,7 +118,7 @@
<dependency.mariadb.version>2.7.4</dependency.mariadb.version>
<dependency.tas-utility.version>3.0.47</dependency.tas-utility.version>
<dependency.rest-assured.version>3.3.0</dependency.rest-assured.version>
<dependency.tas-restapi.version>1.79</dependency.tas-restapi.version>
<dependency.tas-restapi.version>1.71</dependency.tas-restapi.version>
<dependency.tas-cmis.version>1.31</dependency.tas-cmis.version>
<dependency.tas-email.version>1.8</dependency.tas-email.version>
<dependency.tas-webdav.version>1.6</dependency.tas-webdav.version>
@@ -146,7 +146,7 @@
<connection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/alfresco-community-repo.git</developerConnection>
<url>https://github.com/Alfresco/alfresco-community-repo</url>
<tag>15.1</tag>
<tag>HEAD</tag>
</scm>
<distributionManagement>
@@ -755,8 +755,8 @@
<version>${dependency.mockito-core.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>${dependency.commons-dbcp.version}</version>
</dependency>
<dependency>
@@ -766,12 +766,7 @@
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-core</artifactId>
<version>${dependency.camel.version}</version>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-spring-xml</artifactId>
<artifactId>camel-spring</artifactId>
<version>${dependency.camel.version}</version>
<exclusions>
<exclusion>
@@ -800,6 +795,11 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec-http</artifactId>
<version>4.1.72.Final</version>
</dependency>
<dependency>
<!-- If you are going to bump dependency.camel.version, please check if the netty-codec-http has higher version that the one above.-->
<groupId>org.apache.camel</groupId>
@@ -945,7 +945,7 @@
<failOnMissing>true</failOnMissing>
<excludedScopes>provided,test</excludedScopes>
<excludedGroups>^(org\.alfresco|com\.alfresco|org\.activiti|org\.gytheio).*</excludedGroups>
<failIfWarning>true</failIfWarning>
<failIfWarning>false</failIfWarning>
<includedLicenses>
https://raw.githubusercontent.com/Alfresco/third-party-license-overrides/master/includedLicenses.txt
</includedLicenses>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -30,37 +30,49 @@ import org.alfresco.rest.api.model.ArchiveContentRequest;
import org.alfresco.rest.api.model.ContentStorageInfo;
import org.alfresco.rest.api.model.RestoreArchivedContentRequest;
import org.alfresco.rest.framework.resource.parameters.Parameters;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.NodeRef;
/**
* Storage information for content API.
* Note: Currently marked as experimental and subject to change.
*
* @author mpichura
*/
@Experimental
public interface ContentStorageInformation
{
/**
* Note: Currently marked as experimental and subject to change.
*
* @param nodeRef Node reference
* @param contentPropName Qualified name of content property (e.g. 'cm_content')
* @param parameters {@link Parameters} object to get the parameters passed into the request
* @return {@link ContentStorageInfo} object consisting of qualified name of content property and a map of storage properties
*/
@Experimental
ContentStorageInfo getStorageInfo(NodeRef nodeRef, String contentPropName, Parameters parameters);
/**
* Note: Currently marked as experimental and subject to change.
*
* @param nodeRef Node reference
* @param contentPropName Qualified name of content property (e.g. 'cm_content')
* @param archiveContentRequest {@link ArchiveContentRequest} object holding parameters for archive content request
* @return true when request successful, false when unsuccessful
*/
@Experimental
boolean requestArchiveContent(NodeRef nodeRef, String contentPropName, ArchiveContentRequest archiveContentRequest);
/**
* Note: Currently marked as experimental and subject to change.
*
* @param nodeRef Node reference
* @param contentPropName Qualified name of content property (e.g. 'cm_content')
* @param restoreArchivedContentRequest {@link RestoreArchivedContentRequest} object holding parameters for restore from archive request
* @return true when request successful, false when unsuccessful
*/
@Experimental
boolean requestRestoreContentFromArchive(NodeRef nodeRef, String contentPropName,
RestoreArchivedContentRequest restoreArchivedContentRequest);
}
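A caller-side sketch (illustrative only, not part of this commit) of the interface above; the package of ContentStorageInformation and the no-arg ArchiveContentRequest constructor are assumptions:

    import org.alfresco.rest.api.model.ArchiveContentRequest;
    import org.alfresco.service.cmr.repository.NodeRef;

    // Illustrative sketch only: requesting archival of a node's cm_content property
    // through the experimental storage-information API declared above.
    public class StorageInfoCallerSketch
    {
        // The ContentStorageInformation package (org.alfresco.rest.api) is assumed.
        static boolean archiveContent(org.alfresco.rest.api.ContentStorageInformation storageInformation, String nodeId)
        {
            NodeRef nodeRef = new NodeRef("workspace://SpacesStore/" + nodeId);
            // "cm_content" follows the contentPropName format given in the Javadoc above.
            return storageInformation.requestArchiveContent(nodeRef, "cm_content", new ArchiveContentRequest());
        }
    }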

View File

@@ -33,6 +33,7 @@ import org.alfresco.rest.api.model.ContentStorageInfo;
import org.alfresco.rest.api.model.RestoreArchivedContentRequest;
import org.alfresco.rest.framework.core.exceptions.RestoreInProgressException;
import org.alfresco.rest.framework.resource.parameters.Parameters;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.namespace.NamespaceService;
@@ -44,9 +45,11 @@ import java.util.Map;
/**
* Default implementation for {@link ContentStorageInformation}
* Note: Currently marked as experimental and subject to change.
*
* @author mpichura
*/
@Experimental
public class ContentStorageInformationImpl implements ContentStorageInformation
{
@@ -65,6 +68,7 @@ public class ContentStorageInformationImpl implements ContentStorageInformation
* {@inheritDoc}
*/
@Override
@Experimental
public ContentStorageInfo getStorageInfo(NodeRef nodeRef, String contentPropName, Parameters parameters)
{
final QName propQName = getQName(contentPropName);

View File

@@ -38,6 +38,7 @@ import org.alfresco.rest.api.Nodes;
import org.alfresco.rest.api.model.Download;
import org.alfresco.rest.framework.core.exceptions.InvalidArgumentException;
import org.alfresco.rest.framework.core.exceptions.PermissionDeniedException;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.download.DownloadService;
import org.alfresco.service.cmr.download.DownloadStatus;
import org.alfresco.service.cmr.module.ModuleService;
@@ -213,7 +214,8 @@ public class DownloadsImpl implements Downloads
* @param checkLimit The maximum number of nodes to check, set to -1 for no limit
* @see #checkArchiveStatus(NodeRef[], int, Set)
*/
protected void checkArchiveStatus(NodeRef[] nodeRefs, int checkLimit)
@Experimental
protected void checkArchiveStatus(NodeRef[] nodeRefs, int checkLimit)
{
if (canCheckArchived())
{
@@ -232,6 +234,7 @@ public class DownloadsImpl implements Downloads
* @param checkLimit The maximum number of nodes to check, set to -1 for no limit
* @param cache Tracks nodes that we have already checked, if null an empty cache will be created
*/
@Experimental
private void checkArchiveStatus(NodeRef[] nodeRefs, int checkLimit, Set<NodeRef> cache)
{
// Create the cache for recursive calls.
@@ -288,7 +291,8 @@ public class DownloadsImpl implements Downloads
}
}
protected boolean canCheckArchived()
@Experimental
protected boolean canCheckArchived()
{
return Arrays.stream(CLOUD_CONNECTOR_MODULES).anyMatch(m-> moduleService.getModule(m) != null);
}
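The checkArchiveStatus Javadoc above describes a recursive walk bounded by checkLimit and deduplicated by a cache set; a generic sketch of that pattern follows (not the Alfresco implementation, the Node type is hypothetical):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Generic sketch of the pattern described above: stop after checkLimit nodes
    // (-1 means no limit) and skip nodes that were already checked via another path.
    public class ArchiveCheckSketch
    {
        interface Node
        {
            boolean isArchived();
            List<Node> getChildren();
        }

        static boolean anyArchived(List<Node> nodes, int checkLimit)
        {
            return anyArchived(nodes, checkLimit, new HashSet<>(), new int[] {0});
        }

        private static boolean anyArchived(List<Node> nodes, int checkLimit, Set<Node> cache, int[] checked)
        {
            for (Node node : nodes)
            {
                if (!cache.add(node))
                {
                    continue; // already checked through another path
                }
                if (checkLimit >= 0 && checked[0]++ >= checkLimit)
                {
                    return false; // limit reached, stop checking
                }
                if (node.isArchived() || anyArchived(node.getChildren(), checkLimit, cache, checked))
                {
                    return true;
                }
            }
            return false;
        }
    }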

View File

@@ -3132,18 +3132,26 @@ public class NodesImpl implements Nodes
// Write content
writeContent(nodeRef, fileName, content.getInputStream(), true);
}
if ((versionMajor != null) || (versionComment != null))
{
    // by default, first version is major, unless specified otherwise
    VersionType versionType = VersionType.MAJOR;
    if ((versionMajor != null) && (!versionMajor))
    {
        versionType = VersionType.MINOR;
    }
    createVersion(nodeRef, false, versionType, versionComment);
    extractMetadata(nodeRef);
    behaviourFilter.disableBehaviour(nodeRef, ContentModel.ASPECT_VERSIONABLE);
    try
    {
        // by default, first version is major, unless specified otherwise
        VersionType versionType = VersionType.MAJOR;
        if ((versionMajor != null) && (!versionMajor))
        {
            versionType = VersionType.MINOR;
        }
        createVersion(nodeRef, false, versionType, versionComment);
        extractMetadata(nodeRef);
    } finally
    {
        behaviourFilter.enableBehaviour(nodeRef, ContentModel.ASPECT_VERSIONABLE);
    }
}
return nodeRef;

View File

@@ -26,15 +26,19 @@
package org.alfresco.rest.api.model;
import org.alfresco.service.Experimental;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
* Request for content archive.
* Marked as experimental and subject to change.
*
* @author mpichura
*/
@Experimental
public class ArchiveContentRequest
{
/**

View File

@@ -26,11 +26,15 @@
package org.alfresco.rest.api.model;
import org.alfresco.service.Experimental;
/**
* Request for restore content from archive.
* Marked as experimental and subject to change.
*
* @author mpichura
*/
@Experimental
public class RestoreArchivedContentRequest
{
/**

View File

@@ -39,6 +39,7 @@ import org.alfresco.rest.framework.resource.RelationshipResource;
import org.alfresco.rest.framework.resource.actions.interfaces.RelationshipResourceAction;
import org.alfresco.rest.framework.resource.parameters.Parameters;
import org.alfresco.rest.framework.webscripts.WithResponse;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.util.PropertyCheck;
@@ -48,9 +49,11 @@ import javax.servlet.http.HttpServletResponse;
/**
* Node storage information.
* Note: Currently marked as experimental and subject to change.
*
* @author mpichura
*/
@Experimental
@RelationshipResource(name = "storage-info", entityResource = NodesEntityResource.class, title = "Node's content storage information")
public class NodeStorageInfoRelation implements RelationshipResourceAction.ReadById<ContentStorageInfo>, InitializingBean
{
@@ -79,6 +82,7 @@ public class NodeStorageInfoRelation implements RelationshipResourceAction.ReadB
return storageInformation.getStorageInfo(nodeRef, contentPropName, parameters);
}
@Experimental
@Operation("archive")
@WebApiParam(name = "archiveContentRequest", title = "Request for archive content",
description = "Optional parameters for archive content", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT)
@@ -99,6 +103,7 @@ public class NodeStorageInfoRelation implements RelationshipResourceAction.ReadB
}
}
@Experimental
@Operation("archive-restore")
@WebApiParam(name = "restoreArchivedContentRequest", title = "Request for restore content from archive",
description = "Optional parameters for restore content from archive", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT)

View File

@@ -40,6 +40,7 @@ import org.alfresco.rest.framework.resource.RelationshipResource;
import org.alfresco.rest.framework.resource.actions.interfaces.RelationshipResourceAction;
import org.alfresco.rest.framework.resource.parameters.Parameters;
import org.alfresco.rest.framework.webscripts.WithResponse;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.version.Version;
import org.alfresco.util.PropertyCheck;
@@ -51,9 +52,12 @@ import javax.servlet.http.HttpServletResponse;
* Node Versions storage information.
*
* - GET /nodes/{nodeId}/versions/{versionId}/storage-info/{contentPropQNameId}
*
* Note: Currently marked as experimental and subject to change.
*
* @author janv
*/
@Experimental
@RelationshipResource(name = "storage-info", entityResource = NodeVersionsRelation.class, title = "Node Version's content storage information")
public class NodeVersionsStorageInfoRelation implements RelationshipResourceAction.ReadById<ContentStorageInfo>, InitializingBean
{
@@ -87,6 +91,7 @@ public class NodeVersionsStorageInfoRelation implements RelationshipResourceActi
return storageInformation.getStorageInfo(versionNodeRef, contentPropQNameId, parameters);
}
@Experimental
@Operation("archive")
@WebApiParam(name = "archiveContentRequest", title = "Request for archive version content",
description = "Optional parameters for archive version content", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT)
@@ -111,6 +116,7 @@ public class NodeVersionsStorageInfoRelation implements RelationshipResourceActi
}
}
@Experimental
@Operation("archive-restore")
@WebApiParam(name = "restoreArchivedContentRequest", title = "Request for restore version content from archive",
description = "Optional parameters for restore version content from archive", kind = ResourceParameter.KIND.HTTP_BODY_OBJECT)

View File

@@ -25,12 +25,15 @@
*/
package org.alfresco.rest.framework.core.exceptions;
import org.alfresco.service.Experimental;
/**
* Thrown when the content is archived and not readily accessible.
* Status is <i>Precondition Failed</i> client error = 412.
*
* @author David Edwards
*/
@Experimental
public class ArchivedContentException extends ApiException
{

View File

@@ -25,6 +25,9 @@
*/
package org.alfresco.rest.framework.core.exceptions;
import org.alfresco.service.Experimental;
@Experimental
public class RestoreInProgressException extends ApiException
{
public static String DEFAULT_MESSAGE_ID = "framework.exception.RestoreInProgress";

View File

@@ -85,7 +85,8 @@ public class AlfrescoX509ServletFilter extends X509ServletFilterBase
throw new AlfrescoRuntimeException("Missing value for sharedSecretHeader");
}
}
/*
// TODO: Activate this part after OPSEXP-1163 got implemented
if(secureComms == SecureCommsType.NONE)
{
if(!"true".equalsIgnoreCase(config.getInitParameter("allow-unauthenticated-solr-endpoint")))
@@ -93,7 +94,7 @@ public class AlfrescoX509ServletFilter extends X509ServletFilterBase
throw new AlfrescoRuntimeException("solr.secureComms=none is no longer supported. Please use https or secret");
}
}
*/
super.init(config);
}

View File

@@ -357,13 +357,12 @@ function main()
updateNode.properties.content.write(content, updateNameAndMimetype, true, newFilename);
// check it in again, with supplied version history note
updateNode = updateNode.checkin(description, majorVersion);
// Extract the metadata
// (The overwrite policy controls which if any parts of
// the document's properties are updated from this)
extractMetadata(updateNode);
updateNode = updateNode.checkin(description, majorVersion);
if (aspects.length != 0)
{
for (i = 0; i < aspects.length; i++)

View File

@@ -51,8 +51,6 @@ import org.junit.runners.Suite;
org.alfresco.rest.api.tests.GroupsTest.class,
org.alfresco.rest.api.tests.ModulePackagesApiTest.class,
org.alfresco.rest.api.tests.NodeApiTest.class,
org.alfresco.rest.api.tests.NodeApiVersioningMultipartParameterizedTest.class,
org.alfresco.rest.api.tests.NodeApiVersioningJsonParameterizedTest.class,
org.alfresco.rest.api.tests.NodeAssociationsApiTest.class,
org.alfresco.rest.api.tests.NodeVersionsApiTest.class,
org.alfresco.rest.api.tests.NodeVersionRenditionsApiTest.class,

View File

@@ -40,7 +40,6 @@ import org.junit.runners.Suite;
org.alfresco.repo.web.scripts.solr.StatsGetTest.class,
org.alfresco.repo.web.scripts.solr.SOLRSerializerTest.class,
org.alfresco.repo.web.scripts.solr.SOLRAuthenticationFilterTest.class,
org.alfresco.web.app.servlet.AlfrescoX509ServletFilterTest.class,
org.alfresco.repo.web.util.PagingCursorTest.class,
org.alfresco.repo.web.util.paging.PagingTest.class,
org.alfresco.repo.webdav.GetMethodTest.class,

View File

@@ -1,179 +0,0 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.tests;
import org.alfresco.rest.AbstractSingleNetworkSiteTest;
import org.alfresco.rest.api.tests.client.HttpResponse;
import org.alfresco.rest.api.tests.client.data.Document;
import org.alfresco.rest.api.tests.util.RestApiUtil;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.NamespaceService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.*;
import static org.alfresco.rest.api.tests.util.RestApiUtil.toJsonAsStringNonNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(Parameterized.class)
public class NodeApiVersioningJsonParameterizedTest extends AbstractSingleNetworkSiteTest
{
private static final String TYPE_CM_CONTENT = "cm:content";
private static final String TYPE_CUSTOM_DOCUMENT = "custom:document";
private static final String VERSIONING_ENABLED_TRUE = "true";
private static final String VERSIONING_ENABLED_FALSE = "false";
private static final String VERSIONING_ENABLED_UNSET = null;
private static final Boolean MAJOR_VERSION_ENABLED_TRUE = true;
private static final Boolean MAJOR_VERSION_ENABLED_FALSE = false;
private static final Boolean MAJOR_VERSION_ENABLED_UNSET = null;
private static final String VERSION_NOT_EXPECTED = null;
private static final String EXPECTED_VERSION_0_1 = "0.1";
private static final String EXPECTED_VERSION_1_0 = "1.0";
private static final String EXPECTED_ASPECT_VERSIONABLE = "cm:versionable";
private static final String EXPECTED_ASPECT_NONE = null;
protected PermissionService permissionService;
protected AuthorityService authorityService;
private NodeService nodeService;
private NamespaceService namespaceService;
@Parameterized.Parameter(value = 0)
public String type;
@Parameterized.Parameter(value = 1)
public String versioningEnabled;
@Parameterized.Parameter(value = 2)
public Boolean majorVersion;
@Parameterized.Parameter(value = 3)
public String expectedVersion;
@Parameterized.Parameter(value = 4)
public String expectedAspect;
@Parameterized.Parameters //parameters source - MMT-22462 comments
public static Collection<Object[]> data()
{
Collection<Object[]> params = new ArrayList();
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_UNSET, VERSION_NOT_EXPECTED, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_UNSET, VERSION_NOT_EXPECTED, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_TRUE, VERSION_NOT_EXPECTED, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_FALSE, VERSION_NOT_EXPECTED, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
return params;
}
@Before
public void setup() throws Exception
{
super.setup();
permissionService = applicationContext.getBean("permissionService", PermissionService.class);
authorityService = (AuthorityService) applicationContext.getBean("AuthorityService");
nodeService = applicationContext.getBean("NodeService", NodeService.class);
namespaceService= (NamespaceService) applicationContext.getBean("NamespaceService");
}
@After
public void tearDown() throws Exception
{
super.tearDown();
}
@Test
public void versionableDocumentJsonNodeCreationTest() throws Exception
{
setRequestContext(user1);
String myNodeId = getMyNodeId();
Document d1 = new Document();
Map<String, String> params = new HashMap<>();
d1.setName("testDoc" + UUID.randomUUID());
d1.setNodeType(type);
if(versioningEnabled != null)
{
params.put("versioningEnabled", versioningEnabled);
}
if(majorVersion != null)
{
params.put("majorVersion", majorVersion.toString());
}
HttpResponse response = post(getNodeChildrenUrl(myNodeId), toJsonAsStringNonNull(d1), params, null, null, 201);
Document documentResponse = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), Document.class);
assertExpectedVersion(expectedVersion, documentResponse.getProperties());
assertContainsAspect(expectedAspect, documentResponse);
}
private void assertExpectedVersion(String expectedVersion, Map<String, Object> documentProperties)
{
if(documentProperties != null) {
assertEquals(expectedVersion, documentProperties.get("cm:versionLabel"));
}
}
private void assertContainsAspect(String expectedAspect, Document documentResponse)
{
if(expectedAspect != null) {
assertTrue(!documentResponse.getAspectNames().isEmpty());
assertTrue(documentResponse.getAspectNames().contains(expectedAspect));
}
}
}
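Both deleted test classes above drive a single test method across the version/aspect expectation table using the JUnit 4 Parameterized runner. For reference, a minimal, self-contained sketch of that pattern under plain JUnit 4; the class, field and value names here are illustrative and not taken from the Alfresco code:

import java.util.Arrays;
import java.util.Collection;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import static org.junit.Assert.assertEquals;

// Minimal JUnit 4 parameterized test: the runner instantiates the class once per
// row returned by the @Parameters method and injects each column into the
// matching @Parameter field before running every @Test method.
@RunWith(Parameterized.class)
public class VersionLabelParameterizedExampleTest
{
    @Parameterized.Parameter(value = 0)
    public Boolean majorVersion;          // input column 0

    @Parameterized.Parameter(value = 1)
    public String expectedVersionLabel;   // expected column 1

    @Parameterized.Parameters
    public static Collection<Object[]> data()
    {
        return Arrays.asList(new Object[][] {
                { Boolean.TRUE,  "1.0" },
                { Boolean.FALSE, "0.1" },
                { null,          "1.0" }   // unset behaves like a major version, as in the tables above
        });
    }

    @Test
    public void firstVersionLabelFollowsMajorVersionFlag()
    {
        // Stand-in for the real node-creation REST call in the deleted tests.
        String label = Boolean.FALSE.equals(majorVersion) ? "0.1" : "1.0";
        assertEquals(expectedVersionLabel, label);
    }
}

The deleted classes follow exactly this shape, with the node-creation request and response assertions in place of the local computation.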

View File

@@ -1,183 +0,0 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.tests;
import org.alfresco.rest.AbstractSingleNetworkSiteTest;
import org.alfresco.rest.api.tests.client.HttpResponse;
import org.alfresco.rest.api.tests.client.data.Document;
import org.alfresco.rest.api.tests.util.MultiPartBuilder;
import org.alfresco.rest.api.tests.util.RestApiUtil;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.security.AuthorityService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.NamespaceService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.UUID;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class NodeApiVersioningMultipartParameterizedTest extends AbstractSingleNetworkSiteTest
{
private static final String TYPE_CM_CONTENT = "cm:content";
private static final String TYPE_CUSTOM_DOCUMENT = "custom:document";
private static final String VERSIONING_ENABLED_TRUE = "true";
private static final String VERSIONING_ENABLED_FALSE = "false";
private static final String VERSIONING_ENABLED_UNSET = null;
private static final Boolean MAJOR_VERSION_ENABLED_TRUE = true;
private static final Boolean MAJOR_VERSION_ENABLED_FALSE = false;
private static final Boolean MAJOR_VERSION_ENABLED_UNSET = null;
private static final String VERSION_NOT_EXPECTED = null;
private static final String EXPECTED_VERSION_0_1 = "0.1";
private static final String EXPECTED_VERSION_1_0 = "1.0";
private static final String EXPECTED_ASPECT_VERSIONABLE = "cm:versionable";
private static final String EXPECTED_ASPECT_NONE = null;
protected PermissionService permissionService;
protected AuthorityService authorityService;
private NodeService nodeService;
private NamespaceService namespaceService;
@Parameterized.Parameter(value = 0)
public String type;
@Parameterized.Parameter(value = 1)
public String versioningEnabled;
@Parameterized.Parameter(value = 2)
public Boolean majorVersion;
@Parameterized.Parameter(value = 3)
public String expectedVersion;
@Parameterized.Parameter(value = 4)
public String expectedAspect;
@Parameterized.Parameters //parameters source - MMT-22462 comments
public static Collection<Object[]> data()
{
Collection<Object[]> params = new ArrayList();
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_UNSET, VERSION_NOT_EXPECTED, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_TRUE, VERSION_NOT_EXPECTED, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CM_CONTENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_FALSE, VERSION_NOT_EXPECTED, EXPECTED_ASPECT_NONE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_UNSET, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_TRUE, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_UNSET, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_TRUE, EXPECTED_VERSION_1_0, EXPECTED_ASPECT_VERSIONABLE});
params.add(new Object[]{TYPE_CUSTOM_DOCUMENT, VERSIONING_ENABLED_FALSE, MAJOR_VERSION_ENABLED_FALSE, EXPECTED_VERSION_0_1, EXPECTED_ASPECT_VERSIONABLE});
return params;
}
@Before
public void setup() throws Exception
{
super.setup();
permissionService = applicationContext.getBean("permissionService", PermissionService.class);
authorityService = (AuthorityService) applicationContext.getBean("AuthorityService");
nodeService = applicationContext.getBean("NodeService", NodeService.class);
namespaceService = (NamespaceService) applicationContext.getBean("NamespaceService");
}
@After
public void tearDown() throws Exception
{
super.tearDown();
}
@Test
public void versionableDocumentMultipartNodeCreationTest() throws Exception
{
setRequestContext(user1);
String myNodeId = getMyNodeId();
String fileName = "myfile" + UUID.randomUUID() + ".txt";
File file = getResourceFile("quick-2.pdf");
MultiPartBuilder multiPartBuilder = MultiPartBuilder.create().setFileData(new MultiPartBuilder.FileData(fileName, file));
multiPartBuilder.setNodeType(type);
multiPartBuilder.setVersioningEnabled(versioningEnabled);
if(majorVersion != null)
{
multiPartBuilder.setMajorVersion(majorVersion);
}
MultiPartBuilder.MultiPartRequest reqBody = multiPartBuilder.build();
HttpResponse response = post(getNodeChildrenUrl(myNodeId), reqBody.getBody(), null, reqBody.getContentType(), 201);
Document documentResponse = RestApiUtil.parseRestApiEntry(response.getJsonResponse(), Document.class);
Map<String, Object> documentProperties = documentResponse.getProperties();
assertExpectedVersion(expectedVersion, documentProperties);
assertContainsAspect(expectedAspect, documentResponse);
}
private void assertExpectedVersion(String expectedVersion, Map<String, Object> documentProperties)
{
if(documentProperties != null) {
assertEquals(expectedVersion, documentProperties.get("cm:versionLabel"));
}
}
private void assertContainsAspect(String expectedAspect, Document documentResponse)
{
if(expectedAspect != null) {
assertTrue(!documentResponse.getAspectNames().isEmpty());
assertTrue(documentResponse.getAspectNames().contains(expectedAspect));
}
}
}

View File

@@ -1,190 +0,0 @@
/*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.web.app.servlet;
import static junit.framework.TestCase.assertEquals;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import java.util.Properties;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.httpclient.HttpClientFactory.SecureCommsType;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
/**
* Unit tests for {@link AlfrescoX509ServletFilter}.
*/
public class AlfrescoX509ServletFilterTest
{
private static final String BEAN_GLOBAL_PROPERTIES = "global-properties";
private static final String PROP_SECURE_COMMS = "solr.secureComms";
private static final String PROP_SHARED_SECRET = "solr.sharedSecret";
private static final String PROP_SHARED_SECRET_HEADER = "solr.sharedSecret.header";
private static final String SHARED_SECRET_HEADER = "X-Alfresco-Search-Secret";
private static final String SECRET = "secret";
private static final String ALLOW_UNAUTHORIZED_SOLR_ENDPOINT = "allow-unauthenticated-solr-endpoint";
private static final String MISSING_SHARED_SECRET_EXCEPTION_MSG = "Missing value for solr.sharedSecret configuration property";
private static final String MISSING_SHARED_SECRET_HEADER_EXCEPTION_MSG = "Missing value for sharedSecretHeader";
private static final String SECURE_COMMS_NONE_IS_NOT_SUPPORTED_EXCEPTION_MSG = "solr.secureComms=none is no longer supported. Please use https or secret";
private FilterConfig filterConfig;
private Properties globalProperties;
private AlfrescoX509ServletFilter filter;
@Before
public void before()
{
FilterConfig filterConfig = Mockito.mock(FilterConfig.class);
WebApplicationContext webApplicationContext = Mockito.mock(WebApplicationContext.class);
ServletContext servletContext = Mockito.mock(ServletContext.class);
Properties globalProperties = Mockito.mock(Properties.class);
Mockito.when(servletContext.getAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE)).thenReturn(webApplicationContext);
Mockito.when(filterConfig.getServletContext()).thenReturn(servletContext);
Mockito.when(WebApplicationContextUtils.getRequiredWebApplicationContext(filterConfig.getServletContext())).thenReturn(webApplicationContext);
Mockito.when(webApplicationContext.getBean(BEAN_GLOBAL_PROPERTIES)).thenReturn(globalProperties);
this.filterConfig = filterConfig;
this.globalProperties = globalProperties;
this.filter = new AlfrescoX509ServletFilter();
}
@Test (expected = AlfrescoRuntimeException.class)
public void testSharedSecretIsEmpty() throws ServletException
{
Mockito.when(globalProperties.getProperty(PROP_SECURE_COMMS)).thenReturn(SecureCommsType.SECRET.name());
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET)).thenReturn("");
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET_HEADER)).thenReturn(SHARED_SECRET_HEADER);
try
{
filter.init(filterConfig);
}
catch (AlfrescoRuntimeException ex)
{
assertEquals(MISSING_SHARED_SECRET_EXCEPTION_MSG, ex.getMsgId());
throw ex;
}
}
@Test (expected = AlfrescoRuntimeException.class)
public void testSharedSecretIsNull() throws ServletException
{
Mockito.when(globalProperties.getProperty(PROP_SECURE_COMMS)).thenReturn(SecureCommsType.SECRET.name());
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET)).thenReturn(null);
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET_HEADER)).thenReturn(SHARED_SECRET_HEADER);
try
{
filter.init(filterConfig);
}
catch (AlfrescoRuntimeException ex)
{
assertEquals(MISSING_SHARED_SECRET_EXCEPTION_MSG, ex.getMsgId());
throw ex;
}
}
@Test (expected = AlfrescoRuntimeException.class)
public void testSharedSecretHeaderIsEmpty() throws ServletException
{
Mockito.when(globalProperties.getProperty(PROP_SECURE_COMMS)).thenReturn(SecureCommsType.SECRET.name());
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET)).thenReturn(SECRET);
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET_HEADER)).thenReturn("");
try
{
filter.init(filterConfig);
}
catch (AlfrescoRuntimeException ex)
{
assertEquals(MISSING_SHARED_SECRET_HEADER_EXCEPTION_MSG, ex.getMsgId());
throw ex;
}
}
@Test (expected = AlfrescoRuntimeException.class)
public void testSharedSecretHeaderIsNull() throws ServletException
{
Mockito.when(globalProperties.getProperty(PROP_SECURE_COMMS)).thenReturn(SecureCommsType.SECRET.name());
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET)).thenReturn(SECRET);
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET_HEADER)).thenReturn(null);
try
{
filter.init(filterConfig);
}
catch (AlfrescoRuntimeException ex)
{
assertEquals(MISSING_SHARED_SECRET_HEADER_EXCEPTION_MSG, ex.getMsgId());
throw ex;
}
}
@Test
public void testSharedSecretProperlyConfigured() throws ServletException
{
Mockito.when(globalProperties.getProperty(PROP_SECURE_COMMS)).thenReturn(SecureCommsType.SECRET.name());
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET)).thenReturn(SECRET);
Mockito.when(globalProperties.getProperty(PROP_SHARED_SECRET_HEADER)).thenReturn(SHARED_SECRET_HEADER);
filter.init(filterConfig);
}
@Test (expected = AlfrescoRuntimeException.class)
public void testSecureCommsNoneAndNotAllowUnauthenticatedSolrEndpoint() throws ServletException
{
Mockito.when(globalProperties.getProperty(PROP_SECURE_COMMS)).thenReturn(SecureCommsType.NONE.name());
Mockito.when(filterConfig.getInitParameter(ALLOW_UNAUTHORIZED_SOLR_ENDPOINT)).thenReturn("false");
try
{
filter.init(filterConfig);
}
catch (AlfrescoRuntimeException ex)
{
assertEquals(SECURE_COMMS_NONE_IS_NOT_SUPPORTED_EXCEPTION_MSG, ex.getMsgId());
throw ex;
}
}
@Test
public void testSecureCommsNoneAndAllowUnauthenticatedSolrEndpoint() throws ServletException
{
Mockito.when(globalProperties.getProperty(PROP_SECURE_COMMS)).thenReturn(SecureCommsType.NONE.name());
Mockito.when(filterConfig.getInitParameter(ALLOW_UNAUTHORIZED_SOLR_ENDPOINT)).thenReturn("true");
filter.init(filterConfig);
}
}

View File

@@ -54,20 +54,7 @@
</property>
</properties>
</type>
<type name="custom:document">
<title>Sample Document Type</title>
<parent>cm:content</parent>
<properties>
<property name="custom:sample">
<type>d:text</type>
</property>
</properties>
<mandatory-aspects>
<aspect>cm:versionable</aspect>
</mandatory-aspects>
</type>
</types>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-community-repo</artifactId>
<version>15.1</version>
<version>14.111-SNAPSHOT</version>
</parent>
<dependencies>
@@ -77,8 +77,8 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
@@ -418,7 +418,7 @@
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis-spring</artifactId>
<version>2.0.7</version>
<version>2.0.6</version>
</dependency>
<!-- Activiti -->
@@ -656,11 +656,7 @@
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-spring-xml</artifactId>
<artifactId>camel-spring</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>

View File

@@ -51,7 +51,7 @@ import org.alfresco.service.cmr.workflow.WorkflowAdminService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.traitextender.SpringExtensionBundle;
import org.alfresco.util.PropertyCheck;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
@@ -89,7 +89,7 @@ import javax.sql.DataSource;
* </li>
* <li><b>db:</b> Database configuration
* <ul>
* <li>maxConnections: int - The maximum number of active connections. {@link BasicDataSource#getMaxTotal()}</li>
* <li>maxConnections: int - The maximum number of active connections. {@link BasicDataSource#getMaxActive()}</li>
* </ul>
* </li>
* <li><b>authentication</b>: Authentication configuration.
@@ -326,7 +326,7 @@ public class ConfigurationDataCollector extends HBBaseDataCollector implements I
if (dataSource instanceof BasicDataSource)
{
Map<String, Object> db = new HashMap<>();
db.put("maxConnections", ((BasicDataSource) dataSource).getMaxTotal());
db.put("maxConnections", ((BasicDataSource) dataSource).getMaxActive());
configurationValues.put("db", db);
}
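The dbcp2-to-dbcp changes in this hunk belong together: commons-dbcp 1.x and commons-dbcp2 expose the connection-pool ceiling under different property names, so both the javadoc reference and the heartbeat value have to switch getters when the dependency is reverted. A small illustrative sketch, assuming both commons-dbcp 1.4 and commons-dbcp2 are on the classpath (only the BasicDataSource calls are the real APIs; the class and values are made up):

// Fully qualified names are used because both libraries call their pool class BasicDataSource.
public class DbcpPoolLimitExample
{
    public static void main(String[] args)
    {
        // commons-dbcp 1.x: the pool ceiling is called "maxActive"
        org.apache.commons.dbcp.BasicDataSource dbcp1 = new org.apache.commons.dbcp.BasicDataSource();
        dbcp1.setMaxActive(40);
        System.out.println("dbcp 1.x maxConnections = " + dbcp1.getMaxActive());

        // commons-dbcp2: the same ceiling was renamed to "maxTotal"
        org.apache.commons.dbcp2.BasicDataSource dbcp2 = new org.apache.commons.dbcp2.BasicDataSource();
        dbcp2.setMaxTotal(40);
        System.out.println("dbcp2 maxConnections = " + dbcp2.getMaxTotal());
    }
}

Reverting the dependency is therefore what forces getMaxTotal() back to getMaxActive() in the collector above.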

View File

@@ -31,7 +31,7 @@ import org.alfresco.heartbeat.datasender.HBData;
import org.alfresco.heartbeat.jobs.HeartBeatJobScheduler;
import org.alfresco.repo.descriptor.DescriptorDAO;
import org.alfresco.util.PropertyCheck;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;

View File

@@ -35,6 +35,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
@@ -424,6 +425,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
* {@inheritDoc}
*/
@Override
@Experimental
public Map<String, String> getStorageProperties(String contentUrl)
{
ContentStore contentStore = selectReadStore(contentUrl);
@@ -443,6 +445,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
* {@inheritDoc}
*/
@Override
@Experimental
public boolean requestSendContentToArchive(String contentUrl, Map<String, Serializable> archiveParams)
{
final ContentStore contentStore = selectReadStore(contentUrl);
@@ -460,6 +463,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
* {@inheritDoc}
*/
@Override
@Experimental
public boolean requestRestoreContentFromArchive(String contentUrl, Map<String, Serializable> restoreParams)
{
final ContentStore contentStore = selectReadStore(contentUrl);

View File

@@ -46,6 +46,7 @@ import org.alfresco.repo.policy.ClassPolicyDelegate;
import org.alfresco.repo.policy.JavaBehaviour;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
@@ -667,6 +668,7 @@ public class ContentServiceImpl implements ContentService, ApplicationContextAwa
* {@inheritDoc}
*/
@Override
@Experimental
public Map<String, String> getStorageProperties(NodeRef nodeRef, QName propertyQName)
{
final ContentData contentData = getContentDataOrThrowError(nodeRef, propertyQName);
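The @Experimental marker added here and in the neighbouring content-store hunks sits on the same storage-properties API. A minimal sketch of how a caller might consume the node-level method shown above, assuming a wired ContentService bean and an existing NodeRef; the returned keys are specific to the underlying content store, so the example only prints whatever comes back:

import java.util.Map;

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeRef;

// Illustrative caller of ContentService#getStorageProperties(NodeRef, QName).
// contentService and nodeRef are assumed to be supplied by the surrounding code.
public class StoragePropertiesExample
{
    public static void printStorageProperties(ContentService contentService, NodeRef nodeRef)
    {
        Map<String, String> storageProps =
                contentService.getStorageProperties(nodeRef, ContentModel.PROP_CONTENT);

        // Keys depend on the backing store; an empty map simply means the store
        // exposes no storage metadata for this content property.
        storageProps.forEach((key, value) -> System.out.println(key + " = " + value));
    }
}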

View File

@@ -37,6 +37,7 @@ import org.alfresco.repo.content.caching.quota.QuotaManagerStrategy;
import org.alfresco.repo.content.caching.quota.UnlimitedQuotaStrategy;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.repo.content.filestore.SpoofedTextContentReader;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentStreamListener;
@@ -386,6 +387,7 @@ public class CachingContentStore implements ContentStore, ApplicationEventPublis
* {@inheritDoc}
*/
@Override
@Experimental
public Map<String, String> getStorageProperties(final String contentUrl)
{
return backingStore.getStorageProperties(contentUrl);
@@ -395,6 +397,7 @@ public class CachingContentStore implements ContentStore, ApplicationEventPublis
* {@inheritDoc}
*/
@Override
@Experimental
public boolean requestSendContentToArchive(String contentUrl, Map<String, Serializable> archiveParams)
{
return backingStore.requestSendContentToArchive(contentUrl, archiveParams);
@@ -404,6 +407,7 @@ public class CachingContentStore implements ContentStore, ApplicationEventPublis
* {@inheritDoc}
*/
@Override
@Experimental
public boolean requestRestoreContentFromArchive(String contentUrl, Map<String, Serializable> restoreParams)
{
return backingStore.requestRestoreContentFromArchive(contentUrl, restoreParams);

View File

@@ -40,6 +40,7 @@ import org.alfresco.repo.content.ContentContext;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.UnsupportedContentUrlException;
import org.alfresco.repo.content.caching.CachingContentStore;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
@@ -402,6 +403,7 @@ public class AggregatingContentStore extends AbstractContentStore
* {@inheritDoc}
*/
@Override
@Experimental
public Map<String, String> getStorageProperties(String contentUrl)
{
checkPrimaryStore();
@@ -455,6 +457,7 @@ public class AggregatingContentStore extends AbstractContentStore
/**
* {@inheritDoc}
*/
@Experimental
@Override
public boolean requestSendContentToArchive(final String contentUrl, Map<String, Serializable> archiveParams)
{
@@ -464,6 +467,7 @@ public class AggregatingContentStore extends AbstractContentStore
/**
* {@inheritDoc}
*/
@Experimental
@Override
public boolean requestRestoreContentFromArchive(final String contentUrl, final Map<String, Serializable> restoreParams)
{

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2019-2022 Alfresco Software Limited
* Copyright (C) 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -39,8 +39,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.alfresco.transform.client.util.RequestParamMap.DIRECT_ACCESS_URL;
/**
* Transformer that passes a document through a pipeline of transformations to arrive at a target mimetype.
*
@@ -120,8 +118,6 @@ public class LocalPipelineTransform extends AbstractLocalTransform
transformer.intermediateTransformer.transform(currentReader, currentWriter, transformOptions, renditionName, sourceNodeRef);
removeDirectAccessUrlAfterFirstTransform(transformOptions);
// Clear the sourceNodeRef after the first transformation to avoid later transformers thinking the
// intermediate file is the original node.
if (i == 0)
@@ -136,9 +132,4 @@ public class LocalPipelineTransform extends AbstractLocalTransform
}
}
}
private void removeDirectAccessUrlAfterFirstTransform(Map<String, String> transformOptions)
{
transformOptions.remove(DIRECT_ACCESS_URL);
}
}

View File

@@ -483,7 +483,7 @@ public class LocalTransformServiceRegistry extends TransformServiceRegistryImpl
* @param transform the local transform.
* @return {@code true} is supported, {@code false} otherwise.
*/
public boolean isSupported(CoreFunction function, LocalTransform transform)
boolean isSupported(CoreFunction function, LocalTransform transform)
{
return isSupported(function, transform.getName());
}

View File

@@ -35,7 +35,6 @@ import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;

View File

@@ -29,7 +29,6 @@ import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -945,49 +944,4 @@ public interface NodeDAO extends NodeBulkLoader
*/
public Long getNextTxCommitTime(Long fromCommitTime);
/**
* Select the ids of nodes marked as deleted whose transaction committed before the given time.
* @param maxCommitTime latest transaction commit time (ms since epoch) to include
* @return Iterator over node ids
*/
default public Iterator<Long> selectDeletedNodesByCommitTime(long maxCommitTime)
{
throw new UnsupportedOperationException("Not Implemented");
}
/**
* Purge the nodes marked as deleted.
* @param minAge minimum age (ms) a deleted node must have before it is purged
* @param deleteBatchSize number of nodes removed per delete batch
* @return one status message per purged batch, e.g. "Purged old nodes: <count>"
*/
default public List<String> purgeDeletedNodes(long minAge, int deleteBatchSize)
{
throw new UnsupportedOperationException("This operation is not supported");
}
/**
* Select the ids of unused (empty) transactions committed before the given time.
* @param maxCommitTime latest transaction commit time (ms since epoch) to include
* @return Iterator over transaction ids
*/
default public Iterator<Long> selectUnusedTransactionsByCommitTime(long maxCommitTime)
{
throw new UnsupportedOperationException("Not Implemented");
}
/**
* Purge the empty transactions left behind by purged nodes.
* @param minAge minimum age (ms) a transaction must have before it is purged
* @param deleteBatchSize number of transactions removed per delete batch
* @return one status message per purged batch, e.g. "Purged old transactions: <count>"
*/
default public List<String> purgeEmptyTransactions(long minAge, int deleteBatchSize)
{
throw new UnsupportedOperationException("This operation is not supported");
}
}
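The interface methods removed above all use the throwing-default pattern: the interface ships a default that fails fast, so NodeDAO implementations that never run the V2 cleanup keep compiling, and only the iBatis NodeDAOImpl overrides them with real SQL. A generic sketch of that pattern; Dao, JdbcDao and purgeOld are hypothetical names, not Alfresco identifiers:

import java.util.Collections;
import java.util.List;

// Generic illustration of the "throwing default" pattern used by the removed NodeDAO methods.
interface Dao
{
    // Implementations that do not support batched purging inherit this default
    // and fail fast instead of silently doing nothing.
    default List<String> purgeOld(long minAge, int deleteBatchSize)
    {
        throw new UnsupportedOperationException("Not Implemented");
    }
}

class JdbcDao implements Dao
{
    @Override
    public List<String> purgeOld(long minAge, int deleteBatchSize)
    {
        // A real implementation would batch-delete rows older than (now - minAge).
        return Collections.singletonList("Purged old rows: 0");
    }
}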

View File

@@ -25,6 +25,16 @@
*/
package org.alfresco.repo.domain.node.ibatis;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.ibatis.IdsEntity;
import org.alfresco.model.ContentModel;
@@ -55,7 +65,6 @@ import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.apache.ibatis.cursor.Cursor;
import org.apache.ibatis.executor.result.DefaultResultContext;
import org.apache.ibatis.session.ResultContext;
import org.apache.ibatis.session.ResultHandler;
@@ -63,17 +72,6 @@ import org.apache.ibatis.session.RowBounds;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.util.Assert;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
/**
* iBatis-specific extension of the Node abstract DAO
@@ -168,12 +166,6 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
private static final String SELECT_TXN_MIN_TX_ID_IN_NODE_IDRANGE = "alfresco.node.select_TxnMinTxIdInNodeIdRange";
private static final String SELECT_TXN_MAX_TX_ID_IN_NODE_IDRANGE = "alfresco.node.select_TxnMaxTxIdInNodeIdRange";
private static final String SELECT_TXN_NEXT_TXN_COMMIT_TIME = "select_TxnNextTxnCommitTime";
private static final String SELECT_NODES_DELETED_BY_TXN_COMMIT_TIME = "alfresco.node.select.select_Deleted_NodesByTxnCommitTime";
private static final String DELETE_NODES_BY_ID = "alfresco.node.delete_NodesById";
private static final String DELETE_NODE_PROPS_BY_NODE_ID = "alfresco.node.delete_NodePropsByNodeId";
private static final String SELECT_TXNS_UNUSED_BY_TXN_COMMIT_TIME = "alfresco.node.select.select_Txns_UnusedByTxnCommitTime";
private static final String DELETE_TXNS_UNUSED_BY_ID = "alfresco.node.delete_Txns_UnusedById";
protected QNameDAO qnameDAO;
protected DictionaryService dictionaryService;
@@ -1802,138 +1794,8 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
return template.selectOne(SELECT_TXN_NEXT_TXN_COMMIT_TIME, fromCommitTimeEntity);
}
public Iterator<Long> selectDeletedNodesByCommitTime(long maxCommitTime)
{
// Get the deleted nodes
Pair<Long, QName> deletedTypePair = qnameDAO.getQName(ContentModel.TYPE_DELETED);
if (deletedTypePair == null)
{
// Nothing to do
return null;
}
TransactionQueryEntity transactionQueryEntity = new TransactionQueryEntity();
transactionQueryEntity.setMaxCommitTime(maxCommitTime);
transactionQueryEntity.setTypeQNameId(deletedTypePair.getFirst());
Cursor<Long> cursor = template.selectCursor(SELECT_NODES_DELETED_BY_TXN_COMMIT_TIME, transactionQueryEntity);
return cursor.iterator();
}
public Iterator<Long> selectUnusedTransactionsByCommitTime(long maxCommitTime)
{
TransactionQueryEntity maxCommitTimeEntity = new TransactionQueryEntity();
maxCommitTimeEntity.setMaxCommitTime(maxCommitTime);
Cursor<Long> cursor = template.selectCursor(SELECT_TXNS_UNUSED_BY_TXN_COMMIT_TIME, maxCommitTimeEntity);
return cursor.iterator();
}
@Override
public List<String> purgeDeletedNodes(long minAge, int deleteBatchSize)
{
final long maxCommitTime = System.currentTimeMillis() - minAge;
Iterator<Long> nodeIdIterator = this.selectDeletedNodesByCommitTime(maxCommitTime);
ArrayList<Long> nodeIdList = new ArrayList<>();
List<String> deleteResult = new ArrayList<>();
if (isDebugEnabled)
{
logger.debug("nodes selected for deletion, deleteBatchSize:" + deleteBatchSize);
}
while (nodeIdIterator != null && nodeIdIterator.hasNext())
{
if (deleteBatchSize == nodeIdList.size())
{
int count = deleteSelectedNodesAndProperties(nodeIdList);
if (isDebugEnabled)
{
logger.debug("nodes deleted:" + count);
}
deleteResult.add("Purged old nodes: " + count);
nodeIdList.clear();
}
else
{
nodeIdList.add(nodeIdIterator.next());
}
}
if (nodeIdList.size() > 0)
{
int count = deleteSelectedNodesAndProperties(nodeIdList);
if (isDebugEnabled)
{
logger.debug("remaining nodes deleted:" + count);
}
deleteResult.add("Purged old nodes: " + count);
nodeIdList.clear();
}
return deleteResult;
}
public List<String> purgeEmptyTransactions(long minAge, int deleteBatchSize)
{
final long maxCommitTime = System.currentTimeMillis() - minAge;
Iterator<Long> transactionIdIterator = this.selectUnusedTransactionsByCommitTime(maxCommitTime);
ArrayList<Long> transactionIdList = new ArrayList<>();
List<String> deleteResult = new ArrayList<>();
if (isDebugEnabled)
{
logger.debug("transactions selected for deletion, deleteBatchSize:" + deleteBatchSize);
}
while (transactionIdIterator.hasNext())
{
if (deleteBatchSize == transactionIdList.size())
{
int count = deleteSelectedTransactions(transactionIdList);
deleteResult.add("Purged old transactions: " + count);
if (isDebugEnabled)
{
logger.debug("transactions deleted:" + count);
}
transactionIdList.clear();
}
else
{
transactionIdList.add(transactionIdIterator.next());
}
}
if (transactionIdList.size() > 0)
{
int count = deleteSelectedTransactions(transactionIdList);
deleteResult.add("Purged old transactions: " + count);
if (isDebugEnabled)
{
logger.debug("final batch of transactions deleted:" + count);
}
transactionIdList.clear();
}
return deleteResult;
}
private int deleteSelectedNodesAndProperties(List<Long> nodeIdList)
{
int cnt = template.delete(DELETE_NODE_PROPS_BY_NODE_ID, nodeIdList);
if (isDebugEnabled)
{
logger.debug("nodes props deleted:" + cnt);
}
// Finally, remove the nodes
cnt = template.delete(DELETE_NODES_BY_ID, nodeIdList);
if (isDebugEnabled)
{
logger.debug("nodes deleted:" + cnt);
}
return cnt;
}
private int deleteSelectedTransactions(List<Long> transactionIdList)
{
return template.delete(DELETE_TXNS_UNUSED_BY_ID, transactionIdList);
}
/*
* DAO OVERRIDES
*/
@@ -2074,4 +1936,4 @@ public class NodeDAOImpl extends AbstractNodeDAOImpl
assocQName);
}
}
}
}

View File

@@ -1,106 +1,106 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.node.db;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.node.ChildAssocEntity;
import org.alfresco.repo.domain.node.Node;
import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.node.NodeDAO.ChildAssocRefQueryCallback;
import org.alfresco.repo.domain.node.NodeExistsException;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.node.AbstractNodeServiceImpl;
import org.alfresco.repo.node.StoreArchiveMap;
import org.alfresco.repo.node.archive.NodeArchiveService;
import org.alfresco.repo.node.db.NodeHierarchyWalker.VisitedNode;
import org.alfresco.repo.node.db.traitextender.NodeServiceExtension;
import org.alfresco.repo.node.db.traitextender.NodeServiceTrait;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.repo.transaction.TransactionalResourceHelper;
import org.alfresco.service.cmr.dictionary.AspectDefinition;
import org.alfresco.service.cmr.dictionary.AssociationDefinition;
import org.alfresco.service.cmr.dictionary.ChildAssociationDefinition;
import org.alfresco.service.cmr.dictionary.ClassDefinition;
import org.alfresco.service.cmr.dictionary.InvalidAspectException;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.dictionary.TypeDefinition;
import org.alfresco.service.cmr.repository.AssociationExistsException;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.InvalidChildAssociationRefException;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.InvalidStoreRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeRef.Status;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.security.AccessPermission;
import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.OwnableService;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.QNamePattern;
import org.alfresco.service.namespace.RegexQNamePattern;
import org.alfresco.traitextender.AJProxyTrait;
import org.alfresco.traitextender.Extend;
import org.alfresco.traitextender.ExtendedTrait;
import org.alfresco.traitextender.Extensible;
import org.alfresco.traitextender.Trait;
import org.alfresco.util.EqualsHelper;
import org.alfresco.util.GUID;
import org.alfresco.util.Pair;
import org.alfresco.util.ParameterCheck;
import org.alfresco.util.PropertyMap;
import org.alfresco.util.transaction.TransactionListenerAdapter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.I18NUtil;
/**
@@ -844,25 +844,26 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
// get the node
final Pair<Long, NodeRef> nodePair = getNodePairNotNull(nodeRef);
final Long nodeId = nodePair.getFirst();
if (!nodeDAO.hasNodeAspect(nodeId, aspectTypeQName))
{
return;
}
boolean hadAspect = nodeDAO.hasNodeAspect(nodeId, aspectTypeQName);
// Invoke policy behaviours
invokeBeforeUpdateNode(nodeRef);
invokeBeforeRemoveAspect(nodeRef, aspectTypeQName);
nodeDAO.removeNodeAspects(nodeId, Collections.singleton(aspectTypeQName));
if (hadAspect)
{
invokeBeforeRemoveAspect(nodeRef, aspectTypeQName);
nodeDAO.removeNodeAspects(nodeId, Collections.singleton(aspectTypeQName));
}
AspectDefinition aspectDef = dictionaryService.getAspect(aspectTypeQName);
boolean updated = false;
if (aspectDef != null)
{
// Remove default properties
Map<QName, PropertyDefinition> propertyDefs = aspectDef.getProperties();
Map<QName,PropertyDefinition> propertyDefs = aspectDef.getProperties();
Set<QName> propertyToRemoveQNames = propertyDefs.keySet();
nodeDAO.removeNodeProperties(nodeId, propertyToRemoveQNames);
// Remove child associations
// We have to iterate over the associations and remove all those between the parent and child
final List<Pair<Long, ChildAssociationRef>> assocsToDelete = new ArrayList<Pair<Long, ChildAssociationRef>>(5);
@@ -874,24 +875,29 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
return true;
}
@Override public boolean orderResults()
@Override
public boolean orderResults()
{
return false;
}
public boolean handle(Pair<Long, ChildAssociationRef> childAssocPair, Pair<Long, NodeRef> parentNodePair,
Pair<Long, NodeRef> childNodePair)
public boolean handle(
Pair<Long, ChildAssociationRef> childAssocPair,
Pair<Long, NodeRef> parentNodePair,
Pair<Long, NodeRef> childNodePair
)
{
if (isPendingDelete(parentNodePair.getSecond()) || isPendingDelete(childNodePair.getSecond()))
{
if (logger.isTraceEnabled())
{
logger.trace("Aspect-triggered association removal: "
+ "Ignoring child associations where one of the nodes is pending delete: " + childAssocPair);
logger.trace(
"Aspect-triggered association removal: " +
"Ignoring child associations where one of the nodes is pending delete: " + childAssocPair);
}
return true;
}
// Double check that it's not a primary association. If so, we can't delete it and
// have to delete the child node directly and with full archival.
if (childAssocPair.getSecond().isPrimary())
@@ -908,7 +914,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
public void done()
{
}
}
};
// Get all the QNames to remove
Set<QName> assocTypeQNamesToRemove = new HashSet<QName>(aspectDef.getChildAssociations().keySet());
@@ -924,14 +930,14 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
nodeDAO.deleteChildAssoc(assocId);
invokeOnDeleteChildAssociation(assocRef);
}
// Cascade-delete any nodes that were attached to primary associations
for (Pair<Long, NodeRef> childNodePair : nodesToDelete)
{
NodeRef childNodeRef = childNodePair.getSecond();
this.deleteNode(childNodeRef);
}
// Gather peer associations to delete
Map<QName, AssociationDefinition> nodeAssocDefs = aspectDef.getAssociations();
List<Long> nodeAssocIdsToRemove = new ArrayList<Long>(13);
@@ -943,8 +949,8 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
if (logger.isTraceEnabled())
{
logger.trace(
"Aspect-triggered association removal: " + "Ignoring peer associations where one of the nodes is pending delete: "
+ nodeRef);
"Aspect-triggered association removal: " +
"Ignoring peer associations where one of the nodes is pending delete: " + nodeRef);
}
continue;
}
@@ -961,8 +967,9 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
{
if (logger.isTraceEnabled())
{
logger.trace("Aspect-triggered association removal: "
+ "Ignoring peer associations where one of the nodes is pending delete: " + assocPair);
logger.trace(
"Aspect-triggered association removal: " +
"Ignoring peer associations where one of the nodes is pending delete: " + assocPair);
}
continue;
}
@@ -983,15 +990,16 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl implements Extens
}
updated = updated || assocsDeleted > 0;
}
// Invoke policy behaviours
if (updated)
{
invokeOnUpdateNode(nodeRef);
}
invokeOnRemoveAspect(nodeRef, aspectTypeQName);
if (hadAspect)
{
invokeOnRemoveAspect(nodeRef, aspectTypeQName);
}
}
/**

View File

@@ -50,13 +50,6 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
// of the chunk (in ms). Default is a couple of hours.
private int purgeSize = 7200000; // ms
//to determine if we need a time based window deletion of nodes or in fixed size batches.
private String algorithm;
private int deleteBatchSize;
private static final String NODE_TABLE_CLEANER_ALG_V2 = "V2";
/**
* Default constructor
*/
@@ -74,57 +67,15 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
{
return Collections.singletonList("Minimum purge age is negative; purge disabled");
}
List<String> purgedNodes, purgedTxns;
if (NODE_TABLE_CLEANER_ALG_V2.equals(algorithm))
{
refreshLock();
if (logger.isDebugEnabled())
{
logger.debug("DeletedNodeCleanupWorker using batch deletion: About to execute the clean up nodes ");
}
purgedNodes = purgeOldDeletedNodesV2(minPurgeAgeMs);
if (logger.isDebugEnabled())
{
logger.debug(purgedNodes);
}
refreshLock();
if (logger.isDebugEnabled())
{
logger.debug("DeletedNodeCleanupWorker: About to execute the clean up txns ");
}
purgedTxns = purgeOldEmptyTransactionsV2(minPurgeAgeMs);
}
else
{
if (logger.isDebugEnabled())
{
logger.debug("DeletedNodeCleanupWorker: About to start purgeOldDeletedNodes ");
}
purgedNodes = purgeOldDeletedNodes(minPurgeAgeMs);
logger.debug(purgedNodes);
if (logger.isDebugEnabled())
{
logger.debug("DeletedNodeCleanupWorker: About to start purgeOldEmptyTransactions ");
}
purgedTxns = purgeOldEmptyTransactions(minPurgeAgeMs);
}
if (logger.isDebugEnabled())
{
logger.debug(purgedTxns);
}
List<String> allResults = new ArrayList<>(100);
List<String> purgedNodes = purgeOldDeletedNodes(minPurgeAgeMs);
List<String> purgedTxns = purgeOldEmptyTransactions(minPurgeAgeMs);
List<String> allResults = new ArrayList<String>(100);
allResults.addAll(purgedNodes);
allResults.addAll(purgedTxns);
// Done
return allResults;
}
@@ -159,17 +110,7 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
this.purgeSize = purgeSize;
}
public void setAlgorithm(String algorithm)
{
this.algorithm = algorithm;
}
public void setDeleteBatchSize(int deleteBatchSize)
{
this.deleteBatchSize = deleteBatchSize;
}
/**
/**
* Cleans up deleted nodes that are older than the given minimum age.
*
* @param minAge the minimum age of a transaction or deleted node
@@ -181,12 +122,10 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
final long maxCommitTime = System.currentTimeMillis() - minAge;
long fromCommitTime = fromCustomCommitTime;
if (fromCommitTime <= 0L)
{
fromCommitTime = nodeDAO.getMinTxnCommitTimeForDeletedNodes().longValue();
}
if ( fromCommitTime == 0L )
{
String msg = "There are no old nodes to purge.";
@@ -195,10 +134,7 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
}
long loopPurgeSize = purgeSize;
if(logger.isDebugEnabled())
{
logger.debug("DeletedNodeCleanupWorker: purgeOldDeletedNodes started ");
}
Long purgeCount = new Long(0);
while (true)
{
// Ensure we keep the lock
@@ -217,9 +153,9 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
try
{
DeleteNodesByTransactionsCallback purgeNodesCallback = new DeleteNodesByTransactionsCallback(nodeDAO, fromCommitTime, toCommitTime);
Long purgeCount = txnHelper.doInTransaction(purgeNodesCallback, false, true);
purgeCount = txnHelper.doInTransaction(purgeNodesCallback, false, true);
if (purgeCount > 0)
if (purgeCount.longValue() > 0)
{
String msg =
"Purged old nodes: \n" +
@@ -284,8 +220,7 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
break;
}
}
logger.debug("DeletedNodeCleanupWorker: purgeOldDeletedNodes finished ");
// Done
return results;
}
@@ -310,10 +245,6 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
{
fromCommitTime = nodeDAO.getMinUnusedTxnCommitTime().longValue();
}
if(logger.isDebugEnabled())
{
logger.debug("DeletedNodeCleanupWorker: purgeOldEmptyTransactions started ");
}
// delete unused transactions in batches of size 'purgeTxnBlockSize'
while (true)
{
@@ -367,46 +298,14 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
}
fromCommitTime += purgeSize;
if (fromCommitTime >= maxCommitTime)
if(fromCommitTime >= maxCommitTime)
{
break;
break;
}
}
logger.debug("DeletedNodeCleanupWorker: purgeOldEmptyTransactions finished ");
// Done
return results;
}
private List<String> purgeOldDeletedNodesV2(long minAge)
{
refreshLock();
final List<String> returnList = new ArrayList<>();
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
RetryingTransactionCallback<Void> callback = () -> {
returnList.addAll(nodeDAO.purgeDeletedNodes(minAge, deleteBatchSize));
return null;
};
txnHelper.doInTransaction(callback, false, true);
return returnList;
}
private List<String> purgeOldEmptyTransactionsV2(long minAge)
{
refreshLock();
final List<String> returnList = new ArrayList<>();
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
RetryingTransactionCallback<Void> callback = () -> {
returnList.addAll(nodeDAO.purgeEmptyTransactions(minAge, deleteBatchSize));
return null;
};
txnHelper.doInTransaction(callback, false, true);
return returnList;
}
private static abstract class DeleteByTransactionsCallback implements RetryingTransactionCallback<Long>
{
@@ -457,5 +356,4 @@ public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
return count;
}
}
}
}

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2022 Alfresco Software Limited
* Copyright (C) 2021 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -29,22 +29,15 @@ import org.alfresco.repo.content.transform.LocalTransform;
import org.alfresco.repo.content.transform.LocalTransformServiceRegistry;
import org.alfresco.repo.content.transform.UnsupportedTransformationException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.DirectAccessUrl;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.transform.client.model.config.CoreFunction;
import org.alfresco.util.PropertyCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
import java.util.HashMap;
import java.util.Map;
import static org.alfresco.model.ContentModel.PROP_CONTENT;
import static org.alfresco.transform.client.util.RequestParamMap.DIRECT_ACCESS_URL;
/**
* Request synchronous transforms.
*
@@ -58,30 +51,16 @@ public class LocalSynchronousTransformClient implements SynchronousTransformClie
private static Log logger = LogFactory.getLog(LocalTransformClient.class);
private LocalTransformServiceRegistry localTransformServiceRegistry;
private ContentService contentService;
private boolean directAccessUrlEnabled;
public void setLocalTransformServiceRegistry(LocalTransformServiceRegistry localTransformServiceRegistry)
{
this.localTransformServiceRegistry = localTransformServiceRegistry;
}
public void setContentService(ContentService contentService)
{
this.contentService = contentService;
}
public void setDirectAccessUrlEnabled(boolean directAccessUrlEnabled)
{
this.directAccessUrlEnabled = directAccessUrlEnabled;
}
@Override
public void afterPropertiesSet() throws Exception
{
PropertyCheck.mandatory(this, "localTransformServiceRegistry", localTransformServiceRegistry);
PropertyCheck.mandatory(this, "contentService", contentService);
PropertyCheck.mandatory(this, "transformDirectAccessUrlEnabled", directAccessUrlEnabled);
}
@Override
@@ -144,7 +123,6 @@ public class LocalSynchronousTransformClient implements SynchronousTransformClie
logger.debug(TRANSFORM + "requested " + renditionName);
}
actualOptions = addDirectAccessUrlToOptionsIfPossible(actualOptions, sourceNodeRef, transform);
transform.transform(reader, writer, actualOptions, renditionName, sourceNodeRef);
if (logger.isDebugEnabled())
@@ -162,20 +140,6 @@ public class LocalSynchronousTransformClient implements SynchronousTransformClie
}
}
private Map<String, String> addDirectAccessUrlToOptionsIfPossible(Map<String, String> actualOptions,
NodeRef sourceNodeRef, LocalTransform transform)
{
if (directAccessUrlEnabled &&
localTransformServiceRegistry.isSupported(CoreFunction.DIRECT_ACCESS_URL, transform) &&
contentService.isContentDirectUrlEnabled(sourceNodeRef, PROP_CONTENT))
{
DirectAccessUrl directAccessUrl = contentService.requestContentDirectUrl(sourceNodeRef, PROP_CONTENT, true);
actualOptions = new HashMap<>(actualOptions);
actualOptions.put(DIRECT_ACCESS_URL, directAccessUrl.getContentUrl());
}
return actualOptions;
}
@Override
public String getName()
{

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2022 Alfresco Software Limited
* Copyright (C) 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -32,24 +32,18 @@ import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.DirectAccessUrl;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.transform.client.model.config.CoreFunction;
import org.alfresco.util.PropertyCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static org.alfresco.model.ContentModel.PROP_CONTENT;
import static org.alfresco.transform.client.util.RequestParamMap.DIRECT_ACCESS_URL;
/**
* Requests rendition transforms take place using transforms available on the local machine (based on
* {@link LocalTransform}. The transform and consumption of the
@@ -67,7 +61,6 @@ public class LocalTransformClient implements TransformClient, InitializingBean
private TransactionService transactionService;
private ContentService contentService;
private RenditionService2Impl renditionService2;
private boolean directAccessUrlEnabled;
private ExecutorService executorService;
private ThreadLocal<LocalTransform> transform = new ThreadLocal<>();
@@ -92,11 +85,6 @@ public class LocalTransformClient implements TransformClient, InitializingBean
this.renditionService2 = renditionService2;
}
public void setDirectAccessUrlEnabled(boolean directAccessUrlEnabled)
{
this.directAccessUrlEnabled = directAccessUrlEnabled;
}
public void setExecutorService(ExecutorService executorService)
{
this.executorService = executorService;
@@ -109,7 +97,6 @@ public class LocalTransformClient implements TransformClient, InitializingBean
PropertyCheck.mandatory(this, "transactionService", transactionService);
PropertyCheck.mandatory(this, "contentService", contentService);
PropertyCheck.mandatory(this, "renditionService2", renditionService2);
PropertyCheck.mandatory(this, "directAccessUrlEnabled", directAccessUrlEnabled);
if (executorService == null)
{
executorService = Executors.newCachedThreadPool();
@@ -143,9 +130,8 @@ public class LocalTransformClient implements TransformClient, InitializingBean
{
String renditionName = renditionDefinition.getRenditionName();
String targetMimetype = renditionDefinition.getTargetMimetype();
Map<String, String> renditionOptions = renditionDefinition.getTransformOptions();
Map<String, String> actualOptions = renditionDefinition.getTransformOptions();
LocalTransform localTransform = transform.get();
Map<String, String> actualOptions = addDirectAccessUrlToOptionsIfPossible(renditionOptions, sourceNodeRef, localTransform);
executorService.submit(() ->
{
@@ -201,18 +187,4 @@ public class LocalTransformClient implements TransformClient, InitializingBean
}), user);
});
}
private Map<String, String> addDirectAccessUrlToOptionsIfPossible(Map<String, String> actualOptions,
NodeRef sourceNodeRef, LocalTransform transform)
{
if (directAccessUrlEnabled &&
localTransformServiceRegistry.isSupported(CoreFunction.DIRECT_ACCESS_URL, transform) &&
contentService.isContentDirectUrlEnabled(sourceNodeRef, PROP_CONTENT))
{
DirectAccessUrl directAccessUrl = contentService.requestContentDirectUrl(sourceNodeRef, PROP_CONTENT, true);
actualOptions = new HashMap<>(actualOptions);
actualOptions.put(DIRECT_ACCESS_URL, directAccessUrl.getContentUrl());
}
return actualOptions;
}
}
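
The helpers removed above are the only place the direct-access-URL option was attached to local transforms. For context, a minimal caller-side sketch of the ContentService calls they rely on follows; the three calls (isContentDirectUrlEnabled, requestContentDirectUrl, getContentUrl) are taken verbatim from the diff, while the wrapping class and method are illustrative assumptions.

// Minimal sketch, assuming a wired ContentService and a node carrying a cm:content property.
// Only the three ContentService/DirectAccessUrl calls mirror the removed helper; the class
// and method names here are illustrative.
import static org.alfresco.model.ContentModel.PROP_CONTENT;

import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.DirectAccessUrl;
import org.alfresco.service.cmr.repository.NodeRef;

public class DirectAccessUrlUsageSketch
{
    public String resolveDirectAccessUrl(ContentService contentService, NodeRef nodeRef)
    {
        // Direct access URLs must be enabled for this node's content store.
        if (!contentService.isContentDirectUrlEnabled(nodeRef, PROP_CONTENT))
        {
            return null;
        }
        // 'true' mirrors the flag passed by the removed helper.
        DirectAccessUrl directAccessUrl = contentService.requestContentDirectUrl(nodeRef, PROP_CONTENT, true);
        return directAccessUrl.getContentUrl();
    }
}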

View File

@@ -43,6 +43,7 @@ import org.alfresco.repo.domain.tenant.TenantEntity;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.tenant.TenantUtil.TenantRunAsWork;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.Experimental;
import org.alfresco.service.transaction.TransactionService;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
@@ -275,6 +276,7 @@ public abstract class AbstractTenantRoutingContentStore extends AbstractRoutingC
/**
* {@inheritDoc}
*/
@Experimental
@Override
public Map<String, String> getStorageProperties(String contentUrl)
{
@@ -285,6 +287,7 @@ public abstract class AbstractTenantRoutingContentStore extends AbstractRoutingC
* {@inheritDoc}
*/
@Override
@Experimental
public boolean requestSendContentToArchive(String contentUrl, Map<String, Serializable> archiveParams)
{
return getTenantContentStore().requestSendContentToArchive(contentUrl, archiveParams);
@@ -294,6 +297,7 @@ public abstract class AbstractTenantRoutingContentStore extends AbstractRoutingC
* {@inheritDoc}
*/
@Override
@Experimental
public boolean requestRestoreContentFromArchive(String contentUrl, Map<String, Serializable> restoreParams)
{
return getTenantContentStore().requestRestoreContentFromArchive(contentUrl, restoreParams);

View File

@@ -1,33 +1,33 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.tenant;
import java.sql.SQLException;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSource;
/**
* Experimental
@@ -41,7 +41,7 @@ public class TenantBasicDataSource extends BasicDataSource
{
// tenant-specific
this.setUrl(tenantUrl);
this.setMaxTotal(tenantMaxActive == -1 ? bds.getMaxTotal() : tenantMaxActive);
this.setMaxActive(tenantMaxActive == -1 ? bds.getMaxActive() : tenantMaxActive);
// defaults/overrides - see also 'baseDefaultDataSource' (core-services-context.xml + repository.properties)
@@ -54,7 +54,7 @@ public class TenantBasicDataSource extends BasicDataSource
this.setMaxIdle(bds.getMaxIdle());
this.setDefaultAutoCommit(bds.getDefaultAutoCommit());
this.setDefaultTransactionIsolation(bds.getDefaultTransactionIsolation());
this.setMaxWaitMillis(bds.getMaxWaitMillis());
this.setMaxWait(bds.getMaxWait());
this.setValidationQuery(bds.getValidationQuery());
this.setTimeBetweenEvictionRunsMillis(bds.getTimeBetweenEvictionRunsMillis());
this.setMinEvictableIdleTimeMillis(bds.getMinEvictableIdleTimeMillis());
@@ -62,7 +62,7 @@ public class TenantBasicDataSource extends BasicDataSource
this.setTestOnBorrow(bds.getTestOnBorrow());
this.setTestOnReturn(bds.getTestOnReturn());
this.setTestWhileIdle(bds.getTestWhileIdle());
this.setRemoveAbandonedOnBorrow(bds.getRemoveAbandonedOnBorrow());
this.setRemoveAbandoned(bds.getRemoveAbandoned());
this.setRemoveAbandonedTimeout(bds.getRemoveAbandonedTimeout());
this.setPoolPreparedStatements(bds.isPoolPreparedStatements());
this.setMaxOpenPreparedStatements(bds.getMaxOpenPreparedStatements());
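
The setter renames in this hunk follow the commons-dbcp 1.x versus 2.x API split. A minimal sketch of the same three pool limits expressed against the dbcp2 API, with the 1.x equivalents noted in comments (the numeric values are placeholders):

// Illustrative only: the dbcp2 setters removed in this hunk, with their commons-dbcp 1.x
// equivalents (restored by this change) noted alongside. Values are placeholders.
import org.apache.commons.dbcp2.BasicDataSource;

public class Dbcp2SetterMappingSketch
{
    public BasicDataSource configurePool()
    {
        BasicDataSource ds = new BasicDataSource();
        ds.setMaxTotal(40);                  // dbcp 1.x: setMaxActive(40)
        ds.setMaxWaitMillis(10000);          // dbcp 1.x: setMaxWait(10000)
        ds.setRemoveAbandonedOnBorrow(true); // dbcp 1.x: setRemoveAbandoned(true)
        return ds;
    }
}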

View File

@@ -1,28 +1,28 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.tenant;
import java.sql.SQLException;
@@ -32,7 +32,7 @@ import java.util.Map;
import javax.sql.DataSource;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSource;
import org.springframework.extensions.surf.util.ParameterCheck;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

View File

@@ -29,6 +29,7 @@ package org.alfresco.service.cmr.repository;
import org.alfresco.api.AlfrescoPublicApi;
import org.alfresco.model.ContentModel;
import org.alfresco.service.Auditable;
import org.alfresco.service.Experimental;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
import org.alfresco.service.namespace.QName;
@@ -259,6 +260,7 @@ public interface ContentService
* @return Returns a key-value (String-String) collection of storage headers/properties with their respective values for a given {@link NodeRef}.
*/
@Auditable(parameters = {"nodeRef", "propertyQName"})
@Experimental
default Map<String, String> getStorageProperties(NodeRef nodeRef, QName propertyQName)
{
return Collections.emptyMap();
@@ -268,6 +270,7 @@ public interface ContentService
     * Submits a request to send content to an archive (offline) state.
     * If no connector is present, or the connector does not support sending to archive, an {@link UnsupportedOperationException} is thrown.
     * The specific connector decides which storage class/tier is set for the content.
     * This method is experimental and subject to change.
*
* @param nodeRef a reference to a node having a content property
* @param propertyQName the name of the property, which must be of type <b>content</b>
@@ -276,6 +279,7 @@ public interface ContentService
* @throws UnsupportedOperationException when method not implemented
*/
@Auditable(parameters = {"nodeRef", "propertyQName", "archiveParams"})
@Experimental
default boolean requestSendContentToArchive(NodeRef nodeRef, QName propertyQName,
Map<String, Serializable> archiveParams)
{
@@ -289,6 +293,7 @@ public interface ContentService
     * Keys of this map should be restricted to the {@code ContentRestoreParams} enumeration.
     * For AWS S3, the map can indicate expiry days and the Glacier restore tier.
     * For Azure Blob, the map can indicate the rehydrate priority.
     * This method is experimental and subject to change.
*
* @param nodeRef a reference to a node having a content property
* @param propertyQName the name of the property, which must be of type <b>content</b>
@@ -297,6 +302,7 @@ public interface ContentService
* @throws UnsupportedOperationException when method not implemented
*/
@Auditable(parameters = {"nodeRef", "propertyQName", "restoreParams"})
@Experimental
default boolean requestRestoreContentFromArchive(NodeRef nodeRef, QName propertyQName, Map<String, Serializable> restoreParams)
{
throw new UnsupportedOperationException("Request to restore content from archive is not supported by content service.");
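
A minimal caller-side sketch of the two default methods above; the service wiring and the empty parameter maps are assumptions, while the method names and signatures are the ones declared in this interface.

// Minimal sketch, assuming a wired ContentService and a node with a cm:content property.
// The empty parameter maps are placeholders; real keys depend on the storage connector in use.
import java.io.Serializable;
import java.util.Map;

import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeRef;

public class ArchiveRequestSketch
{
    public void archiveThenRestore(ContentService contentService, NodeRef nodeRef)
    {
        try
        {
            // Ask the storage connector to move the content to an archive (offline) tier.
            Map<String, Serializable> archiveParams = Map.of();
            contentService.requestSendContentToArchive(nodeRef, ContentModel.PROP_CONTENT, archiveParams);

            // Later, ask for the content to be restored from the archive tier.
            Map<String, Serializable> restoreParams = Map.of();
            contentService.requestRestoreContentFromArchive(nodeRef, ContentModel.PROP_CONTENT, restoreParams);
        }
        catch (UnsupportedOperationException e)
        {
            // Thrown by the defaults above when no connector supports archive/restore.
        }
    }
}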

View File

@@ -47,7 +47,7 @@ import java.io.StringReader;
import java.util.Collections;
import java.util.List;
import static org.alfresco.transform.client.util.RequestParamMap.ENDPOINT_TRANSFORM_CONFIG_LATEST;
import static org.alfresco.transform.client.util.RequestParamMap.INCLUDE_CORE_VERSION;
/**
* This class reads multiple T-Engine config and local files and registers as if they were all
@@ -106,7 +106,7 @@ public class CombinedConfig extends CombinedTransformConfig
private boolean addRemoteConfig(String baseUrl, String remoteType)
{
String url = baseUrl + (baseUrl.endsWith("/") ? "" : "/") + ENDPOINT_TRANSFORM_CONFIG_LATEST;
String url = baseUrl + (baseUrl.endsWith("/") ? "" : "/") + "transform/config?" + INCLUDE_CORE_VERSION + "=" + true;
HttpGet httpGet = new HttpGet(url);
boolean successReadingConfig = true;
try

View File

@@ -30,7 +30,7 @@
<property name="initialSize">
<value>0</value>
</property>
<property name="maxTotal">
<property name="maxActive">
<value>1</value>
</property>
<property name="maxIdle">

View File

@@ -156,7 +156,7 @@
<bean id="defaultDataSource" parent="baseDefaultDataSource" />
<!-- Datasource bean -->
<bean id="baseDefaultDataSource" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close" abstract="true">
<bean id="baseDefaultDataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close" abstract="true">
<property name="driverClassName">
<value>${db.driver}</value>
</property>
@@ -172,7 +172,7 @@
<property name="initialSize" >
<value>${db.pool.initial}</value>
</property>
<property name="maxTotal" >
<property name="maxActive" >
<value>${db.pool.max}</value>
</property>
<property name="minIdle" >
@@ -187,7 +187,7 @@
<property name="defaultTransactionIsolation" >
<value>${db.txn.isolation}</value>
</property>
<property name="maxWaitMillis" >
<property name="maxWait" >
<value>${db.pool.wait.max}</value>
</property>
<property name="validationQuery" >
@@ -211,7 +211,7 @@
<property name="testWhileIdle" >
<value>${db.pool.evict.validate}</value>
</property>
<property name="removeAbandonedOnBorrow" >
<property name="removeAbandoned" >
<value>${db.pool.abandoned.detect}</value>
</property>
<property name="removeAbandonedTimeout" >

View File

@@ -203,7 +203,6 @@ Inbound settings from iBatis
<mapper resource="alfresco/ibatis/#resource.dialect#/content-insert-SqlMap.xml"/>
<mapper resource="alfresco/ibatis/#resource.dialect#/node-common-SqlMap.xml"/>
<mapper resource="alfresco/ibatis/#resource.dialect#/node-select-children-SqlMap.xml"/>
<mapper resource="alfresco/ibatis/#resource.dialect#/node-select-SqlMap.xml"/>
<mapper resource="alfresco/ibatis/#resource.dialect#/node-update-SqlMap.xml"/>
<mapper resource="alfresco/ibatis/#resource.dialect#/node-delete-SqlMap.xml"/>
<mapper resource="alfresco/ibatis/#resource.dialect#/node-insert-SqlMap.xml"/>

View File

@@ -1505,32 +1505,5 @@
where
commit_time_ms > #{minCommitTime}
</select>
<delete id="delete_NodesById" parameterType="list">
delete from alf_node
where
id IN
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
</delete>
<delete id="delete_NodePropsByNodeId" parameterType="list">
delete from alf_node_properties
where
node_id IN
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
</delete>
<delete id="delete_Txns_UnusedById" parameterType="list">
delete from alf_transaction
where
id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
</delete>
</mapper>
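
For orientation, statements like delete_NodesById above take a list of IDs as the parameter object and expand it through <foreach> into an IN clause. A DAO-side call would look roughly like the sketch below; the fully qualified statement id and the use of SqlSessionTemplate are assumptions for illustration.

// Illustrative only: invoking a list-parameterised delete statement through mybatis-spring.
// The statement id "alfresco.node.delete_NodesById" is an assumption based on the id shown above.
import java.util.List;

import org.mybatis.spring.SqlSessionTemplate;

public class NodeBatchDeleteSketch
{
    private final SqlSessionTemplate template;

    public NodeBatchDeleteSketch(SqlSessionTemplate template)
    {
        this.template = template;
    }

    public int deleteNodesById(List<Long> nodeIds)
    {
        // The mapper's <foreach> expands nodeIds into "id IN (?, ?, ...)".
        return template.delete("alfresco.node.delete_NodesById", nodeIds);
    }
}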

View File

@@ -1,33 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="alfresco.node.select">
<select id="select_Deleted_NodesByTxnCommitTime" parameterType="TransactionQuery" fetchSize="100000" resultType="java.lang.Long">
select
node.id
from
alf_node node
join alf_transaction txn on (node.transaction_id = txn.id)
where
node.type_qname_id = #{typeQNameId}
<![CDATA[and commit_time_ms < #{maxCommitTime}]]>
</select>
<select id="select_Txns_UnusedByTxnCommitTime" parameterType="TransactionQuery" fetchSize="100000" resultType="java.lang.Long">
select
id
from alf_transaction
where not exists
(
select 1
from
alf_node node
where
node.transaction_id = alf_transaction.id
)
<![CDATA[and commit_time_ms <= #{maxCommitTime}]]>
</select>
</mapper>

View File

@@ -27,5 +27,5 @@
txn.commit_time_ms < #{maxCommitTime})
]]>
</delete>
</mapper>

View File

@@ -1,33 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="alfresco.node.select">
<select id="select_Deleted_NodesByTxnCommitTime" parameterType="TransactionQuery" fetchSize="-2147483648" resultType="java.lang.Long">
select
node.id
from
alf_node node
join alf_transaction txn on (node.transaction_id = txn.id)
where
node.type_qname_id = #{typeQNameId}
<![CDATA[and commit_time_ms < #{maxCommitTime}]]>
</select>
<select id="select_Txns_UnusedByTxnCommitTime" parameterType="TransactionQuery" fetchSize="-2147483648" resultType="java.lang.Long">
select
id
from alf_transaction
where not exists
(
select 1
from
alf_node node
where
node.transaction_id = alf_transaction.id
)
<![CDATA[and commit_time_ms <= #{maxCommitTime}]]>
</select>
</mapper>

View File

@@ -238,14 +238,8 @@
<property name="purgeSize">
<value>${index.tracking.purgeSize}</value>
</property>
<property name="algorithm">
<value>${system.node_table_cleaner.algorithm}</value>
</property>
<property name="deleteBatchSize">
<value>${system.node_cleanup.delete_batchSize}</value>
</property>
</bean>
<!-- String length adjustment -->
<bean id="nodeStringLengthWorker" class="org.alfresco.repo.node.db.NodeStringLengthWorker">
<constructor-arg index="0" ref="nodeDAO" />

View File

@@ -81,15 +81,12 @@
<property name="transactionService" ref="transactionService" />
<property name="contentService" ref="contentService" />
<property name="renditionService2" ref="renditionService2" />
<property name="directAccessUrlEnabled" value="${local.transform.directAccessUrl.enabled}"/>
</bean>
<bean id="synchronousTransformClient" parent="localSynchronousTransformClient" />
<bean id="localSynchronousTransformClient" class="org.alfresco.repo.rendition2.LocalSynchronousTransformClient">
<property name="localTransformServiceRegistry" ref="localTransformServiceRegistry" />
<property name="contentService" ref="contentService" />
<property name="directAccessUrlEnabled" value="${local.transform.directAccessUrl.enabled}"/>
</bean>
<!-- Deprecated bean that converts transform properties to the deprecated TransformationOptions format -->

View File

@@ -3,7 +3,7 @@
repository.name=Main Repository
# Schema number
version.schema=16100
version.schema=16000
# Directory configuration
@@ -1246,11 +1246,6 @@ system.delete_not_exists.read_only=false
system.delete_not_exists.timeout_seconds=-1
system.prop_table_cleaner.algorithm=V2
# --Node cleanup batch - default settings
system.node_cleanup.delete_batchSize=1000
system.node_table_cleaner.algorithm=V1
# Configure the system-wide (ACS) settings for direct access urls.
#
# For Direct Access URLs to be usable on the service-layer, the feature must be enabled both system-wide and on the
@@ -1318,9 +1313,6 @@ restApi.directAccessUrl.enabled=false
# Direct Access Url REST API calls cannot request an explicit expiry time.
restApi.directAccessUrl.defaultExpiryTimeInSec=30
# Controls whether direct access URLs may be used in transforms.
local.transform.directAccessUrl.enabled=true
# Creates additional indexes on alf_node and alf_transaction. Recommended for large repositories.
system.new-node-transaction-indexes.ignored=true
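
The two removed node-cleanup properties correspond to setters on the deleted-node cleanup worker; the setter names appear in the DeletedNodeBatchCleanupTest and TransactionCleanupTest changes further down this diff. A sketch of the equivalent programmatic configuration, using the removed defaults:

// Sketch only: the removed repository.properties defaults expressed as calls on the cleanup worker.
// Setter names are taken from the tests changed later in this diff; the values are the removed defaults.
import org.alfresco.repo.node.db.DeletedNodeCleanupWorker;

public class NodeCleanupConfigSketch
{
    public void applyRemovedDefaults(DeletedNodeCleanupWorker worker)
    {
        worker.setAlgorithm("V1");       // system.node_table_cleaner.algorithm=V1
        worker.setDeleteBatchSize(1000); // system.node_cleanup.delete_batchSize=1000
    }
}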

View File

@@ -87,8 +87,7 @@ import org.junit.runners.Suite;
org.alfresco.repo.node.cleanup.TransactionCleanupTest.class,
org.alfresco.repo.security.person.GetPeopleCannedQueryTest.class,
org.alfresco.repo.domain.schema.script.DeleteNotExistsExecutorTest.class,
org.alfresco.repo.node.cleanup.DeletedNodeBatchCleanupTest.class
org.alfresco.repo.domain.schema.script.DeleteNotExistsExecutorTest.class
})
public class AllDBTestsTestSuite
{

View File

@@ -63,7 +63,6 @@ import org.junit.runners.Suite;
org.alfresco.repo.jscript.ScriptSearchTest.class,
org.alfresco.repo.lock.LockUtilsTest.class,
org.alfresco.repo.lock.mem.LockStoreImplTest.class,
org.alfresco.repo.management.CheckRequiredClassesForLoggingConsoleUnitTest.class,
org.alfresco.repo.management.subsystems.CryptodocSwitchableApplicationContextFactoryTest.class,
org.alfresco.repo.module.ModuleDetailsImplTest.class,
org.alfresco.repo.module.ModuleVersionNumberTest.class,

View File

@@ -84,7 +84,6 @@ import org.junit.runners.Suite;
org.alfresco.repo.node.archive.ArchiveAndRestoreTest.class,
org.alfresco.repo.node.db.DbNodeServiceImplTest.class,
org.alfresco.repo.node.cleanup.TransactionCleanupTest.class,
org.alfresco.repo.node.cleanup.DeletedNodeBatchCleanupTest.class,
org.alfresco.repo.node.db.DbNodeServiceImplPropagationTest.class,
})
public class AppContext03TestSuite

View File

@@ -57,7 +57,7 @@ import org.alfresco.service.cmr.workflow.WorkflowAdminService;
import org.alfresco.service.descriptor.Descriptor;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.traitextender.SpringExtensionBundle;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSource;
import org.junit.Before;
import org.junit.Test;

View File

@@ -32,7 +32,7 @@ import org.alfresco.heartbeat.jobs.HeartBeatJobScheduler;
import org.alfresco.repo.descriptor.DescriptorDAO;
import org.alfresco.service.cmr.repository.HBDataCollectorService;
import org.alfresco.service.descriptor.Descriptor;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSource;
import org.junit.Before;
import org.junit.Test;

View File

@@ -31,12 +31,10 @@ import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
import org.alfresco.util.PropertyMap;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -132,56 +130,4 @@ public class RemoveFeaturesActionExecuterTest extends BaseSpringTest
action2.setParameterValue(RemoveFeaturesActionExecuter.PARAM_ASPECT_NAME, ContentModel.ASPECT_VERSIONABLE);
this.executer.execute(action2, this.nodeRef);
}
/**
* Test removing aspect properties
*/
@Test
public void testRemovingAspectPropertiesAfterExecution()
{
QName QNAME_PUBLISHER = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "publisher");
QName QNAME_SUBJECT = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "subject");
// Execute the action
PropertyMap dublinCoreProperties = new PropertyMap(2);
dublinCoreProperties.put(QNAME_PUBLISHER, "publisher");
dublinCoreProperties.put(QNAME_SUBJECT, "subject");
nodeService.addAspect(nodeRef, ContentModel.ASPECT_DUBLINCORE, dublinCoreProperties);
// Check that the node has aspect properties
assertTrue(this.nodeService.hasAspect(this.nodeRef, ContentModel.ASPECT_DUBLINCORE));
assertTrue(this.nodeService.getProperties(this.nodeRef).containsKey(QNAME_PUBLISHER));
assertTrue(this.nodeService.getProperties(this.nodeRef).containsKey(QNAME_SUBJECT));
// Remove the aspect
ActionImpl action = new ActionImpl(null, ID, RemoveFeaturesActionExecuter.NAME, null);
action.setParameterValue(RemoveFeaturesActionExecuter.PARAM_ASPECT_NAME, ContentModel.ASPECT_DUBLINCORE);
this.executer.execute(action, this.nodeRef);
// Check that the node now no longer has aspect properties
assertFalse(this.nodeService.getProperties(this.nodeRef).containsKey(QNAME_PUBLISHER));
assertFalse(this.nodeService.getProperties(this.nodeRef).containsKey(QNAME_SUBJECT));
}
/**
* Test removing not added child aspect
*/
@Test
public void testRemovingNotAddedChildAspect()
{
QName QNAME_TITLE = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "title");
// Execute the action
PropertyMap titledProperties = new PropertyMap(1);
titledProperties.put(QNAME_TITLE, "title");
nodeService.addAspect(nodeRef, ContentModel.ASPECT_TITLED, titledProperties);
// Remove the child aspect which has not been added to the node
ActionImpl action = new ActionImpl(null, ID, RemoveFeaturesActionExecuter.NAME, null);
action.setParameterValue(RemoveFeaturesActionExecuter.PARAM_ASPECT_NAME, ContentModel.ASPECT_DUBLINCORE);
this.executer.execute(action, this.nodeRef);
// Now check that the node has parent aspect properties
assertTrue(this.nodeService.getProperties(this.nodeRef).containsKey(QNAME_TITLE));
}
}

View File

@@ -1,42 +0,0 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2021 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.management;
import junit.framework.TestCase;
import org.junit.Test;
public class CheckRequiredClassesForLoggingConsoleUnitTest extends TestCase
{
private static final String CLASS_NAME = "org.apache.log4j.jmx.HierarchyDynamicMBean";
@Test
public void testRequiredClassIsOnAClasspath() throws ClassNotFoundException
{
final Class<?> clazz = Class.forName(CLASS_NAME);
assertNotNull(clazz);
}
}

View File

@@ -1,366 +0,0 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.node.cleanup;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Stream.of;
import javax.transaction.UserTransaction;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.node.Transaction;
import org.alfresco.repo.domain.node.ibatis.NodeDAOImpl;
import org.alfresco.repo.node.db.DeletedNodeCleanupWorker;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.cmr.security.AuthenticationService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.testing.category.DBTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.extensions.webscripts.GUID;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
@Category({ OwnJVMTestsCategory.class, DBTests.class })
@DirtiesContext(classMode = ClassMode.BEFORE_EACH_TEST_METHOD)
public class DeletedNodeBatchCleanupTest extends BaseSpringTest
{
@Autowired
private AuthenticationService authenticationService;
@Autowired
private NodeDAO nodeDAO;
@Autowired
@Qualifier("node.nodesSharedCache")
private SimpleCache<Serializable, Serializable> nodesCache;
@Autowired
private DeletedNodeCleanupWorker worker;
@Autowired
private NamespaceService namespaceService;
@Autowired
private TransactionService transactionService;
@Autowired
private NodeService nodeService;
@Autowired
private SearchService searchService;
private RetryingTransactionHelper helper;
private List<NodeRef> testNodes;
@Before
public void before()
{
helper = transactionService.getRetryingTransactionHelper();
authenticationService.authenticate("admin", "admin".toCharArray());
resetWorkerConfig();
// create 5 test nodes
final NodeRef companyHome = getCompanyHome();
testNodes = IntStream.range(0, 5)
.mapToObj(i -> helper.doInTransaction(createNodeCallback(companyHome), false, true))
.collect(toList());
// clean up pre-existing data
helper.doInTransaction(() -> worker.doClean(), false, true);
}
private void resetWorkerConfig()
{
worker.setMinPurgeAgeDays(0);
worker.setAlgorithm("V2");
worker.setDeleteBatchSize(20);
}
private NodeRef getCompanyHome()
{
StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
NodeRef storeRoot = nodeService.getRootNode(storeRef);
List<NodeRef> nodeRefs = searchService.selectNodes(storeRoot, "/app:company_home", null, namespaceService,
false);
return nodeRefs.get(0);
}
private RetryingTransactionCallback<NodeRef> createNodeCallback(NodeRef companyHome)
{
return () -> nodeService.createNode(
companyHome, ContentModel.ASSOC_CONTAINS, QName.createQName("test", GUID.generate()),
ContentModel.TYPE_CONTENT).getChildRef();
}
private void deleteNodes(NodeRef nodeRef, NodeRef... additionalNodeRefs)
{
Stream.concat(of(nodeRef), of(additionalNodeRefs))
.forEach(this::deleteNode);
}
private void deleteNode(NodeRef nodeRef)
{
helper.doInTransaction(new DeleteNode(nodeRef), false, true);
}
@Test
public void testPurgeNodesDeleted()
{
final NodeRef nodeRef4 = getNode(4);
final NodeRef nodeRef5 = getNode(5);
// delete nodes 4 and 5
deleteNodes(nodeRef4, nodeRef5);
// double-check that node 4 and 5 are present in deleted form
nodesCache.clear();
assertTrue("Node 4 is deleted but not purged", nodeDAO.getNodeRefStatus(nodeRef4).isDeleted());
assertTrue("Node 5 is deleted but not purged", nodeDAO.getNodeRefStatus(nodeRef5).isDeleted());
worker.doClean();
// verify that node 4 and 5 were purged
nodesCache.clear();
assertNull("Node 4 was not cleaned up", nodeDAO.getNodeRefStatus(nodeRef4));
assertNull("Node 5 was not cleaned up", nodeDAO.getNodeRefStatus(nodeRef5));
}
@Test
public void testNodesDeletedNotPurgedWhenNotAfterPurgeAge()
{
final NodeRef nodeRef1 = getNode(1);
final NodeRef nodeRef2 = getNode(2);
// delete nodes 1 and 2
deleteNodes(nodeRef1, nodeRef2);
// double-check that node 1 and 2 are present in deleted form
nodesCache.clear();
assertTrue("Node 1 is deleted but not purged", nodeDAO.getNodeRefStatus(nodeRef1).isDeleted());
assertTrue("Node 2 is deleted but not purged", nodeDAO.getNodeRefStatus(nodeRef2).isDeleted());
// run the worker
worker.setMinPurgeAgeDays(1);
worker.doClean();
// verify that node 1 and 2 were not purged
nodesCache.clear();
assertNotNull("Node 1 was cleaned up", nodeDAO.getNodeRefStatus(nodeRef1));
assertNotNull("Node 2 was cleaned up", nodeDAO.getNodeRefStatus(nodeRef2));
}
@Test
public void testPurgeUnusedTransactions() throws Exception
{
// Execute transactions that update a number of nodes. For nodeRef1, all but the last txn will be unused.
final long start = System.currentTimeMillis();
final Long minTxnId = nodeDAO.getMinTxnId();
final Map<NodeRef, List<String>> txnIds = createTransactions();
final List<String> txnIds1 = txnIds.get(getNode(1));
final List<String> txnIds2 = txnIds.get(getNode(2));
final List<String> txnIds3 = txnIds.get(getNode(3));
// Double-check that n4 and n5 are present in deleted form
nodesCache.clear();
UserTransaction txn = transactionService.getUserTransaction(true);
txn.begin();
try
{
assertTrue("Node 4 is deleted but not purged", nodeDAO.getNodeRefStatus(getNode(4)).isDeleted());
assertTrue("Node 5 is deleted but not purged", nodeDAO.getNodeRefStatus(getNode(5)).isDeleted());
}
finally
{
txn.rollback();
}
// run the transaction cleaner
worker.doClean();
// Get transactions committed after the test started
RetryingTransactionHelper.RetryingTransactionCallback<List<Transaction>> getTxnsCallback = () -> ((NodeDAOImpl) nodeDAO).selectTxns(
start, Long.MAX_VALUE, Integer.MAX_VALUE, null, null, true);
List<Transaction> txns = transactionService.getRetryingTransactionHelper()
.doInTransaction(getTxnsCallback, true, false);
List<String> expectedUnusedTxnIds = new ArrayList<>(10);
expectedUnusedTxnIds.addAll(txnIds1.subList(0, txnIds1.size() - 1));
List<String> expectedUsedTxnIds = new ArrayList<>(5);
expectedUsedTxnIds.add(txnIds1.get(txnIds1.size() - 1));
expectedUsedTxnIds.addAll(txnIds2);
expectedUsedTxnIds.addAll(txnIds3);
// 4 and 5 should not be in the list because they are deletes
// check that the correct transactions have been purged i.e. all except the last one to update the node
// i.e. in this case, all but the last one in txnIds1
List<String> unusedTxnsNotPurged = expectedUnusedTxnIds.stream()
.filter(txnId -> containsTransaction(txns, txnId))
.collect(toList());
if (!unusedTxnsNotPurged.isEmpty())
{
fail("Unused transaction(s) were not purged: " + unusedTxnsNotPurged);
}
long numFoundUnusedTxnIds = expectedUnusedTxnIds.stream()
.filter(txnId -> !containsTransaction(txns, txnId))
.count();
assertEquals(9, numFoundUnusedTxnIds);
// check that the correct transactions remain i.e. all those in txnIds2, txnIds3, txnIds4 and txnIds5
long numFoundUsedTxnIds = expectedUsedTxnIds.stream()
.filter(txnId -> containsTransaction(txns, txnId))
.count();
assertEquals(3, numFoundUsedTxnIds);
// Get transactions committed after the test started
RetryingTransactionHelper.RetryingTransactionCallback<List<Long>> getTxnsUnusedCallback = () -> nodeDAO.getTxnsUnused(
minTxnId, Long.MAX_VALUE, Integer.MAX_VALUE);
List<Long> txnsUnused = transactionService.getRetryingTransactionHelper()
.doInTransaction(getTxnsUnusedCallback, true, false);
assertEquals(0, txnsUnused.size());
// Double-check that n4 and n5 were removed as well
nodesCache.clear();
assertNull("Node 4 was not cleaned up", nodeDAO.getNodeRefStatus(getNode(4)));
assertNull("Node 5 was not cleaned up", nodeDAO.getNodeRefStatus(getNode(5)));
}
private boolean containsTransaction(List<Transaction> txns, String txnId)
{
return txns.stream()
.map(Transaction::getChangeTxnId)
.filter(changeTxnId -> changeTxnId.equals(txnId))
.map(match -> true)
.findFirst()
.orElse(false);
}
private Map<NodeRef, List<String>> createTransactions()
{
Map<NodeRef, List<String>> txnIds = new HashMap<>();
UpdateNode updateNode1 = new UpdateNode(getNode(1));
UpdateNode updateNode2 = new UpdateNode(getNode(2));
UpdateNode updateNode3 = new UpdateNode(getNode(3));
DeleteNode deleteNode4 = new DeleteNode(getNode(4));
DeleteNode deleteNode5 = new DeleteNode(getNode(5));
List<String> txnIds1 = new ArrayList<>();
List<String> txnIds2 = new ArrayList<>();
List<String> txnIds3 = new ArrayList<>();
List<String> txnIds4 = new ArrayList<>();
List<String> txnIds5 = new ArrayList<>();
txnIds.put(getNode(1), txnIds1);
txnIds.put(getNode(2), txnIds2);
txnIds.put(getNode(3), txnIds3);
txnIds.put(getNode(4), txnIds4);
txnIds.put(getNode(5), txnIds5);
for (int i = 0; i < 10; i++)
{
String txnId1 = helper.doInTransaction(updateNode1, false, true);
txnIds1.add(txnId1);
if (i == 0)
{
String txnId2 = helper.doInTransaction(updateNode2, false, true);
txnIds2.add(txnId2);
}
if (i == 1)
{
String txnId3 = helper.doInTransaction(updateNode3, false, true);
txnIds3.add(txnId3);
}
}
String txnId4 = helper.doInTransaction(deleteNode4, false, true);
txnIds4.add(txnId4);
String txnId5 = helper.doInTransaction(deleteNode5, false, true);
txnIds5.add(txnId5);
return txnIds;
}
private class UpdateNode implements RetryingTransactionHelper.RetryingTransactionCallback<String>
{
private final NodeRef nodeRef;
UpdateNode(NodeRef nodeRef)
{
this.nodeRef = nodeRef;
}
@Override
public String execute() throws Throwable
{
nodeService.setProperty(nodeRef, ContentModel.PROP_NAME, GUID.generate());
return AlfrescoTransactionSupport.getTransactionId();
}
}
private class DeleteNode implements RetryingTransactionHelper.RetryingTransactionCallback<String>
{
private final NodeRef nodeRef;
DeleteNode(NodeRef nodeRef)
{
this.nodeRef = nodeRef;
}
@Override
public String execute() throws Throwable
{
nodeService.addAspect(nodeRef, ContentModel.ASPECT_TEMPORARY, null);
nodeService.deleteNode(nodeRef);
return AlfrescoTransactionSupport.getTransactionId();
}
}
private NodeRef getNode(int i)
{
return testNodes.get(i - 1);
}
}

View File

@@ -110,8 +110,6 @@ public class TransactionCleanupTest
this.nodesCache = (SimpleCache<Serializable, Serializable>) ctx.getBean("node.nodesSharedCache");
this.worker = (DeletedNodeCleanupWorker)ctx.getBean("nodeCleanup.deletedNodeCleanup");
this.worker.setMinPurgeAgeDays(0);
this.worker.setAlgorithm("V1");
this.helper = transactionService.getRetryingTransactionHelper();
authenticationService.authenticate("admin", "admin".toCharArray());

Some files were not shown because too many files have changed in this diff