Compare commits

2.0.2 ... 2.0.3 (73 commits)

Author SHA1 Message Date
Travis CI User
28ca975ba3 [maven-release-plugin][skip ci]prepare release 2.0.3 2022-03-15 13:13:09 +00:00
Travis CI User
1fc2bc88e0 [maven-release-plugin][skip ci]prepare for next development iteration 2022-03-11 16:59:10 +00:00
Travis CI User
a06b58062a [maven-release-plugin][skip ci]prepare release 2.0.3-RC5 2022-03-11 16:59:07 +00:00
Domenico Sibilio
bd0ee47f2d Merge pull request #1389 from Alfresco/fix/ACS-2666_Solr-admin-console-logging-fix
ACS-2666 Avoid reload4j being copied to solr-webapp libs
2022-03-11 15:03:39 +01:00
Jamal Kaabi-Mofrad
b2022bfcba ACS-2666: Fixed license check failure. 2022-03-11 13:51:29 +00:00
Domenico Sibilio
05d294c295 ACS-2666 Avoid reload4j being copied to solr-webapp libs 2022-03-11 13:11:40 +01:00
Jamal Kaabi-Mofrad
c445201b3b ACS-2666: Upgrade lucene-solr to fix logging in solr admin web console. 2022-03-11 10:34:26 +00:00
Travis CI User
3926da1901 [maven-release-plugin][skip ci]prepare for next development iteration 2022-03-10 13:05:14 +00:00
Travis CI User
2400cc9a2c [maven-release-plugin][skip ci]prepare release 2.0.3-RC4 2022-03-10 13:05:12 +00:00
Vítor Moreira
076cbe032c Merge pull request #1388 from Alfresco/fix/ACS-2652_solr_admin_not_working_20x
Revert "Merge pull request #1376 from Alfresco/fix/MNT-22537_angular_…
2022-03-10 08:07:25 +00:00
Vitor Moreira
750968838a Revert "Merge pull request #1376 from Alfresco/fix/MNT-22537_angular_upgrade"
This reverts commit 872e59afd04fb4b0ed71152ccd60b322405c3e5c.
2022-03-09 17:21:22 +00:00
Piotr Żurek
20d1b320cf Merge pull request #1386 from Alfresco/fix/MNT-22770-V2.0.x
MNT-22770 Fix duplicates for sharded reindexing
2022-03-09 16:49:11 +01:00
pzurek
a3a798d7fd MNT-22770 Fix duplicates for sharded reindexing 2022-03-09 15:32:20 +01:00
Travis CI User
45536af8d8 [maven-release-plugin][skip ci]prepare for next development iteration 2022-03-06 20:49:09 +00:00
Travis CI User
5f88839d94 [maven-release-plugin][skip ci]prepare release 2.0.3-RC3 2022-03-06 20:49:07 +00:00
Jamal Kaabi-Mofrad
20205ecd91 ACS-2305: Fix licenses [release]. 2022-03-06 18:50:59 +00:00
Jamal Kaabi-Mofrad
d746688688 ACS-2305: Add atlassian repo. 2022-03-06 17:11:00 +00:00
Jamal Kaabi-Mofrad
44fe4770a7 ACS-2305: Upgrade lucene-solr to fix security vulnerabilities. 2022-03-06 15:48:50 +00:00
Jamal Kaabi-Mofrad
098d62b7af Merge pull request #1383 from Alfresco/fix/log4j-fix
Fix log4j security vulnerability reported by quay.io. [skip tests]
2022-03-04 21:46:51 +00:00
Jamal Kaabi-Mofrad
9f7bcf67b1 Fix build failure 2022-03-04 20:58:08 +00:00
Jamal Kaabi-Mofrad
9463570b71 Fix log4j security vulnerability reported in quay.io. 2022-03-04 18:36:59 +00:00
Vítor Moreira
d393d05c18 Merge pull request #1380 from Alfresco/fix/MNT-22816_unindexednode_npe_20x
MNT-22816: indexing a nonindexednode gives a NPE
2022-03-04 14:56:50 +00:00
Vítor Moreira
e206b12ca4 Merge pull request #1379 from Alfresco/fix/MNT-22817_fix_flow_unable_retrieve_nodes_metadata_20x
MNT-22817: Fix flow when unable to retrieve node's metadata
2022-03-04 14:10:57 +00:00
Vitor Moreira
9454b10fde MNT-22816: indexing a nonindexednode gives a NPE
(cherry picked from commit bdc534ac30951d64056f328a9aa578d46e73dad6)
2022-03-04 12:50:48 +00:00
Nicolas Barithel
a14ac99921 MNT-22817: Fix flow when unable to retrieve node's metadata
(cherry picked from commit 5a03c798091b809a52f7cc27152918ec38cdf42d)
2022-03-04 12:28:38 +00:00
Vítor Moreira
0ee9497f19 Merge pull request #1377 from Alfresco/fix/MNT-22537_angular_upgrade_20x
Merge pull request #1376 from Alfresco/fix/MNT-22537_angular_upgrade
2022-03-03 13:28:53 +00:00
Vítor Moreira
645fd6eede Merge pull request #1376 from Alfresco/fix/MNT-22537_angular_upgrade
MNT-22537: upgrade angularjs to 1.8.2
(cherry picked from commit 18c6ba8e687ec612b4c64f40d9b03ea764b6287a)
2022-03-03 11:53:39 +00:00
Travis CI User
a247fcaca3 [maven-release-plugin][skip ci]prepare for next development iteration 2022-03-02 17:28:09 +00:00
Travis CI User
ab84260861 [maven-release-plugin][skip ci]prepare release 2.0.3-RC2 2022-03-02 17:28:07 +00:00
Domenico Sibilio
782f5d69e2 Merge pull request #1375 from Alfresco/feature/ACS-2438_backport 2022-03-02 15:36:07 +01:00
Aleksandra Onych
73e38787ce [ACS-2565] cleanup docker-compose files
(cherry picked from commit 55a188dbd04a59fd9c9b7e2b39ae3a862982ddf1)
2022-03-02 13:38:53 +01:00
Damian.Ujma@hyland.com
14ecf93bdf ACS-2567 Update IE/SS READMEs
(cherry picked from commit 796d42499cc4b935f3b69822cb0b896fabf9389f)
2022-03-02 13:38:36 +01:00
Domenico Sibilio
08b1040554 ACS-2438 ACS-2544 ACS-2506 Backporting 2022-03-02 13:38:16 +01:00
Domenico Sibilio
e14c44b1a5 ACS-2563 Disallow alfresco.secureComms=none
(cherry picked from commit e26c7f08601e15fcde0ba51df90212d69b9800e1)
2022-03-02 13:36:52 +01:00
Domenico Sibilio
05144ad615 ACS-2506 - Enhance error message and add unit tests
(cherry picked from commit 4bd7640166adc8721dd41a269471fa1057e12ab8)
2022-03-02 13:34:14 +01:00
Domenico Sibilio
b0692b21fd ACS-2544 - Update JAVA_TOOL_OPTIONS to SOLR_OPTS
(cherry picked from commit dbb04c60bf126e0cc3b57467941d95b15d1f3864)
2022-03-02 13:34:06 +01:00
Domenico Sibilio
8835dba1e1 ACS-2544 - Add Docker image support for Shared Secret Authentication
(cherry picked from commit 95fd02e503dfe198f3d42bf0b25566bb8982eaa6)
2022-03-02 13:33:38 +01:00
Vítor Moreira
85f1d6c3fc Merge pull request #1365 from Alfresco/fix/MNT-22391_httpclient_upgrade_20x
MNT-22391 httpclient upgrade to 4.5.13
2022-03-02 11:10:02 +00:00
Alex Mukha
b924576e3b Delete CODEOWNERS 2022-03-01 11:32:25 +00:00
Vítor Moreira
4f503dcfe8 Merge pull request #1370 from Alfresco/fix/MNT-22818_logs_typo_20X
Merge pull request #1337 from nbarithel/fix-logs-typo
2022-02-28 16:18:56 +00:00
Tom Page
db98c95fc4 Merge pull request #1337 from nbarithel/fix-logs-typo
MNT-22818 Minor typo fixes in Search Services logging

(cherry picked from commit 48abd7d8b8b7428c15fcc4c24c510a8767d444a0)
2022-02-28 15:15:09 +00:00
Vítor Moreira
5f600b3c7c Merge pull request #1374 from Alfresco/fix/ACS-2600-make-builds-reproducible-20X
Merge pull request #1373 from Alfresco/fix/ACS-2600-make-builds-repro…
2022-02-28 13:53:01 +00:00
Piotr Żurek
59d1e1ed88 Merge pull request #1373 from Alfresco/fix/ACS-2600-make-builds-reproducible
ACS-2600 Removing `yum update`s

(cherry picked from commit 19b6fee2ff1d5886c411e4f478bca8956fdb4853)
2022-02-28 10:50:48 +00:00
Tom Page
f3a136cbcc Merge pull request #1364 from Alfresco/feature/MNT-22410_FixFTSReport_20x
MNT-22410 Fix FTS report (release/V2.0.x).
2022-02-22 10:16:10 +00:00
Eva Vasques
c7c1dfd4d8 Fix commons-lang3 version in notice.txt 2022-02-21 18:35:28 +00:00
evasques
e274a4e0c1 Merge pull request #1349 from Alfresco/fix/MNT-22391_httpclient_vuln
MNT-22391 Update Httpclient to 4.5.13

(cherry picked from commit c6eee8f93044777343c5ce5b18ff644ebecab451)
2022-02-21 16:20:40 +00:00
Tom Page
e98a7b22fd Merge pull request #1362 from Alfresco/feature/MNT-22410_E2ETest
MNT-22410 Fix report to only include cm:content nodes.

(cherry picked from commit 2df93c1d2f5919f84f929a9865a50446999e4934)
2022-02-21 15:52:03 +00:00
Tom Page
5259452367 Merge pull request #1355 from Alfresco/feature/MNT-22410_E2ETest
MNT-22410 E2E test for FTS report.

(cherry picked from commit ad11e804cd980c53d1df16644b89016674f93714)
2022-02-21 15:51:56 +00:00
Tom Page
573d849f47 Merge pull request #1353 from Alfresco/feature/MNT-22410_FixFTSReport
MNT-22410 Update FTS report to use correct field.

(cherry picked from commit 74f0a34de3cd337fe2c6bd86cd2ab9bdea5fd20c)
2022-02-21 15:51:47 +00:00
evasques
c63b6960ec Merge pull request #1359 from Alfresco/fix/MNT-22802_DB_ID_RANGE_idle_shard_fix_20x
Only add the latest transaction if not indexed when shard out of range
2022-02-18 17:04:12 +00:00
Eva Vasques
f5047a2fd7 Only add the latest transaction if not indexed when shard out of range
(cherry picked from commit 7b6ba5817f8b5744e4a672895d2327fe2ff0a411)
2022-02-18 14:58:31 +00:00
Vítor Moreira
329c654cea Merge pull request #1335 from Alfresco/fix/MNT-22547_upgrade_jquery_20x
MNT-22547: upgrade jquery to 3.5.1
2022-02-03 09:26:02 +00:00
Vitor Moreira
80447a287c MNT-22547: upgrade jquery to 3.5.1
(cherry picked from commit 4705683cf6a0e257934228168c951ee32ed89291)
2022-02-02 16:03:12 +00:00
Tom Page
2ce8b19e5d Merge pull request #1329 from Alfresco/feature/MNT-22712_MetadataTrackerNPE_20x
MNT-22712 Fix metadata tracker NPE
2022-02-01 13:37:59 +00:00
Tom Page
d2c7df5416 Merge release/V2.0.x into MNT-22712_MetadataTrackerNPE_20x. 2022-02-01 12:39:01 +00:00
Tom Page
252df880d0 Merge pull request #1330 from Alfresco/feature/ACS-2492_Centos7
ACS-2492 Upgrade to Centos 7.

(cherry picked from commit 3b435f79436d96a05ff78ad24896fda243f02d43)
2022-02-01 11:31:20 +00:00
George Evangelopoulos
a90d443828 Merge pull request #1299 from Alfresco/fix/MNT-22712-npe-in-solr-metadata-tracking
MNT-22712 introduced synchronized method

(cherry picked from commit 756d771373128e93fffb2ef390e39b67a28bb1d2)
2022-01-31 16:15:44 +00:00
Vítor Moreira
5f04b97757 Merge pull request #1323 from Alfresco/fix/simple_xml_safe_license_20X
simple-xml-safe isn't brought by solr but it replaces solr's simple-xml v2.0.x
2022-01-25 19:35:24 +00:00
Vitor Moreira
cc71efc717 simple-xml-safe isn't brought by solr but it replaces solr's simple-xml 2022-01-25 12:13:37 +00:00
Vítor Moreira
7de8a99cd7 Merge pull request #1312 from Alfresco/fix/MNT-22553_webflux_vulnerability_20X
MNT-22553: bumped spring framework version to 5.3.7
2022-01-14 17:01:22 +00:00
Vitor Moreira
3411cd0c54 MNT-22553: bumped spring framework version to 5.3.7 2022-01-14 16:08:06 +00:00
Vítor Moreira
73cd9126ed PRODSEC-4483: replace simple-xml with simple-xml-safe
PRODSEC-4483: replace simple-xml with simple-xml-safe
2022-01-14 09:57:02 +00:00
Vítor Moreira
e2755e79ca PRODSEC-4483: replace simple-xml with simple-xml-safe
PRODSEC-4483: replace simple-xml with simple-xml-safe
(cherry picked from commit d3ec5229d21d6e6e71d6df88e818f6d9c05dbaf7)
2022-01-13 22:22:07 +00:00
Vítor Moreira
a6c51a6ceb MNT-22544: bumped jdom2 version to 2.0.6.1
MNT-22544: bumped jdom2 version to 2.0.6.1
2022-01-13 21:49:32 +00:00
Vitor Moreira
41b87f44cc MNT-22544: bumped jdom2 version to 2.0.6.1 2022-01-13 19:52:04 +00:00
Vítor Moreira
2314ba850b MNT-22548: bump org.restlet version (#1292) (#1298)
MNT-22548: bump org.restlet version
(cherry picked from commit 0053088a587d7e90573c1c0679789e3ee6e99c0f)
2021-12-20 16:39:37 +00:00
Vítor Moreira
7c811dda31 PRODSEC 4487 hadoop upgrade (#1285) (#1289)
PRODSEC-4487: Upgrade Apache Hadoop to version 2.7.7
(cherry picked from commit cd7a79c755ddaac3eb4ed9198df311fd5fdba6e4)
2021-12-14 10:49:55 +00:00
Vítor Moreira
bf9235fb8e MNT-22556: updated xmpcore version to 6.1.11 (#1270) (#1282)
MNT-22556: updated xmpcore version to 6.1.11
(cherry picked from commit 08e4c7a2df45126ac9a1abefa44732cf4d6c7cd2)
2021-12-07 19:07:59 +00:00
Vítor Moreira
5c240f31ba PROSEC-4479: Apache Tika updated to version 1.27 (#1281)
PROSEC-4479: Apache Tika updated to version 1.27
(cherry picked from commit a7cb3e672ec9a85e822b7ecb40010bc10303b1a9)
2021-12-06 19:14:40 +00:00
tiagosalvado10
5bc708becd Merge pull request #1273 from Alfresco/fix/MNT-22714_cascade-tracker-add-npe-validation_V20x
[MNT-22714] Added validation to prevent NPE when cascade tracker is disabled (#1272)
2021-11-30 20:03:19 +00:00
tiagosalvado10
9461cd397f Merge pull request #1272 from Alfresco/fix/MNT-22714_cascade-tracker-add-npe-validation
[MNT-22714] Added validation to prevent NPE when cascade tracker is disabled

(cherry picked from commit a2321e079a62c6afba61b48a5706f215505f7912)
2021-11-30 13:11:09 +00:00
Tom Page
dc9e9eb9c4 Merge pull request #1261 from Alfresco/feature/ACS-2071_UpdateLicensePlugin
ACS-2071 Update to use new release of license-maven-plugin.

(cherry picked from commit 6abf343f295ef82cf100155b3fc4663262f0aa24)
2021-11-03 11:16:25 +00:00
Travis CI User
d8bce2f248 [maven-release-plugin][skip ci]prepare for next development iteration 2021-07-14 10:56:35 +00:00
68 changed files with 1460 additions and 1167 deletions

View File

@@ -1 +0,0 @@
* alex.mukha@alfresco.com

View File

@@ -14,7 +14,12 @@ do
echo "Waiting for Service to start using endpoint: ${endpoint}"
until [[ "$(curl --output /dev/null -w ''%{http_code}'' --silent --head --fail ${endpoint})" == 200 ]] || [ "$COUNTER" -eq "$TIMEOUT" ]; do
additional_args=()
if [[ $endpoint == *"solr"* ]]; then
additional_args+=(-H "X-Alfresco-Search-Secret: secret")
fi
until [[ "$(curl --output /dev/null -w ''%{http_code}'' "${additional_args[@]}" --silent --head --fail ${endpoint})" == 200 ]] || [ "$COUNTER" -eq "$TIMEOUT" ]; do
printf '.'
sleep $WAIT_INTERVAL
COUNTER=$(($COUNTER+$WAIT_INTERVAL))
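Read on its own, the new readiness check amounts to the fragment below; the endpoint value is illustrative, and `secret` is the fixed value this CI script uses. An array holds the optional header so that, when empty, it expands to nothing without breaking the quoting of the curl command.

```bash
# Illustrative standalone version of the readiness probe added above.
endpoint="http://localhost:8983/solr/"          # hypothetical endpoint for the example
additional_args=()
if [[ $endpoint == *"solr"* ]]; then
    # Solr endpoints now require the shared-secret header
    additional_args+=(-H "X-Alfresco-Search-Secret: secret")
fi
curl --output /dev/null -w '%{http_code}' "${additional_args[@]}" --silent --head --fail "$endpoint"
```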

View File

@@ -3,21 +3,21 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-search-and-insight-parent</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
</parent>
<groupId>org.alfresco</groupId>
<artifactId>search-analytics-e2e-test</artifactId>
<name>Search Analytics E2E Tests</name>
<description>Test Project to test Search Service and Analytics Features on a complete setup of Alfresco, Share</description>
<properties>
<tas.rest.api.version>1.49</tas.rest.api.version>
<tas.cmis.api.version>1.16</tas.cmis.api.version>
<tas.utility.version>3.0.33</tas.utility.version>
<tas.rest.api.version>1.73</tas.rest.api.version>
<tas.cmis.api.version>1.31</tas.cmis.api.version>
<tas.utility.version>3.0.48</tas.utility.version>
<rm.version>3.3.1</rm.version>
<suiteXmlFile>src/test/resources/SearchSuite.xml</suiteXmlFile>
<test.exclude />
<test.include />
<jackson.databind.version>2.9.10.5</jackson.databind.version>
<jackson.databind.version>2.9.10.8</jackson.databind.version>
<licenseName>community</licenseName>
</properties>
<build>
@@ -129,7 +129,7 @@
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.12</version>
<version>1.18.20</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -171,4 +171,4 @@
</snapshots>
</repository>
</repositories>
</project>
</project>

View File

@@ -129,7 +129,7 @@ def getSolrcoreReplacements(sharding, communication, fingerprint):
solrcoreReplacements['alfresco.encryption.ssl.truststore.location=.*'] = 'alfresco.encryption.ssl.truststore.location=\\\\\\/opt\\\\\\/alfresco-search-services\\\\\\/keystore\\\\\\/ssl-repo-client.truststore'
solrcoreReplacements['alfresco.encryption.ssl.truststore.type=.*'] = 'alfresco.encryption.ssl.truststore.type=JCEKS'
elif communication == 'none':
solrcoreReplacements['alfresco.secureComms=https'] = 'alfresco.secureComms=none'
solrcoreReplacements['alfresco.secureComms=https'] = r'alfresco.secureComms=none\\\\\\\nalfresco.allowUnauthenticatedSolrEndpoint=true'
else :
solrcoreReplacements['alfresco.secureComms=https'] = 'alfresco.secureComms=secret'
return solrcoreReplacements
@@ -325,6 +325,8 @@ if __name__ == '__main__':
if args.communication == 'mtls':
addAlfrescoMtlsConfig(dcYaml['services']['alfresco']['build']['args'])
addAlfrescoVolumes(dcYaml['services']['alfresco'])
elif args.communication == 'none':
dcYaml['services']['alfresco']['build']['args']['SOLR_COMMS'] = 'none'
if not args.share:
deleteServices(dcYaml, 'share', 'alfresco-pdf-renderer', 'imagemagick')

View File

@@ -42,6 +42,12 @@ RUN if [ "$$SOLR_COMMS" == "https" ] ; then \
truststoreFile=\"\/usr\/local\/tomcat\/alf_data\/keystore\/ssl.truststore\"\n\
truststorePass=\"$${TRUSTSTORE_PASS}\" truststoreType=\"$${TRUSTSTORE_TYPE}\" clientAuth=\"want\" sslProtocol=\"TLS\">\n\
<\/Connector>/g" $${TOMCAT_DIR}/conf/server.xml; \
elif [ "$$SOLR_COMMS" == "none" ] ; then \
sed -i "s/<filter-class>org.alfresco.web.app.servlet.AlfrescoX509ServletFilter<\/filter-class>/&\n\
<init-param>\n\
<param-name>allow-unauthenticated-solr-endpoint<\/param-name>\n\
<param-value>true<\/param-value>\n\
<\/init-param>/" $${TOMCAT_DIR}/webapps/alfresco/WEB-INF/web.xml; \
fi
# Expose keystore folder

View File

@@ -1,11 +0,0 @@
# docker-compose related environments
ALFRESCO_IMAGE=quay.io/alfresco/alfresco-governance-repository-enterprise
ALFRESCO_TAG=latest
SHARE_IMAGE=quay.io/alfresco/alfresco-governance-share-enterprise
SHARE_TAG=latest
POSTGRES_IMAGE=postgres
POSTGRES_TAG=10.1
SEARCH_IMAGE=quay.io/alfresco/insight-engine
SEARCH_TAG=latest
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
ACTIVEMQ_TAG=5.15.6

View File

@@ -1,9 +0,0 @@
include ../Makefile
include .env
# CURRENT_DIR is the folder where this Makefile is saved
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
start: ## 0 - starts search service with SSL enabled
$(dc) config && $(dc) up -d && \
make wait

View File

@@ -1,65 +0,0 @@
version: '3'
services:
alfresco:
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
environment:
JAVA_OPTS : "
-Ddb.driver=org.postgresql.Driver
-Ddb.username=alfresco
-Ddb.password=alfresco
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=search
-Dsolr.port=8983
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
-Ddeployment.method=DOCKER_COMPOSE
-Dcsrf.filter.enabled=false
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
-Xms1g -Xmx1g
"
ports:
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
- "5005:5005" #Java debugging
- "8081:8080" #Browser port for Alfresco
share:
image: ${SHARE_IMAGE}:${SHARE_TAG}
environment:
- REPO_HOST=alfresco
- REPO_PORT=8080
- "CATALINA_OPTS= -Xms500m -Xmx500m"
ports:
- 8082:8080 #Browser port for Share
postgres:
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
environment:
- POSTGRES_PASSWORD=alfresco
- POSTGRES_USER=alfresco
- POSTGRES_DB=alfresco
ports:
- 5432:5432
search:
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
environment:
#Solr needs to know how to register itself with Alfresco
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
#Alfresco needs to know how to call solr
- SOLR_SOLR_HOST=search
- SOLR_SOLR_PORT=8983
#Create the default alfresco and archive cores
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
ports:
- 8083:8983 #Browser port
activemq:
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
ports:
- 8161:8161 # Web Console
- 5672:5672 # AMQP
- 61616:61616 # OpenWire
- 61613:61613 # STOMP

View File

@@ -1,11 +0,0 @@
# docker-compose related environments
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
ALFRESCO_TAG=6.1.0-EA3
SHARE_IMAGE=alfresco/alfresco-share
SHARE_TAG=6.0
POSTGRES_IMAGE=postgres
POSTGRES_TAG=10.1
SEARCH_IMAGE=quay.io/alfresco/search-services
SEARCH_TAG=latest
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
ACTIVEMQ_TAG=5.15.6

View File

@@ -1,68 +0,0 @@
include ../../Makefile
include .env
# the suffix of the backup taken in time. It can be overriden on runtime: make SUFIX=T1 backup-perform
SUFIX ?=T0
# CURRENT_DIR is the folder where this Makefile is saved
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
# this is used also in compose yml files
export HOST_BACKUP_LOCATION:=$(CURRENT_DIR)/host-bkp
ifeq ($(dc-backup),)
dc-backup:=$(dc) -f ../docker-compose.yml -f docker-compose.backup.yml
endif
backup-prepare: clean ## 1 - prepare backup for testing
@echo "Starting Backup Prepare" && \
$(sudo) rm -rf $(HOST_BACKUP_LOCATION) && \
mkdir -p $(HOST_BACKUP_LOCATION)/alf_data && \
mkdir -p $(HOST_BACKUP_LOCATION)/solr/archive && \
mkdir -p $(HOST_BACKUP_LOCATION)/solr/alfresco && \
mkdir -p $(HOST_BACKUP_LOCATION)/db && \
$(sudo) chmod -R 777 $(HOST_BACKUP_LOCATION) && \
$(dc-backup) up -d
backup-perform: ## 2 - perform the backup of alf_data and db data
@echo "Starting Backup Perform" && \
$(sudo) rm -rf $(HOST_BACKUP_LOCATION)_$(SUFIX) && \
$(sudo) chmod -R 777 $(HOST_BACKUP_LOCATION) && \
$(dc-backup) stop alfresco && \
$(dc-backup) exec postgres bash -c 'pg_dump --dbname=postgresql://alfresco:alfresco@127.0.0.1:5432/alfresco' > $(HOST_BACKUP_LOCATION)/db/alfresco.pg && \
cp -R $(HOST_BACKUP_LOCATION) $(HOST_BACKUP_LOCATION)_$(SUFIX) && \
$(dc-backup) start alfresco
backup-restore: clean ## 3 - start restoring from backup location
@echo "Starting Backup Restore" && \
$(sudo) rm -rf $(HOST_BACKUP_LOCATION) && \
mkdir -p $(HOST_BACKUP_LOCATION) && \
cp -rf $(HOST_BACKUP_LOCATION)_$(SUFIX)/alf_data $(HOST_BACKUP_LOCATION)/alf_data && \
cp -rf $(HOST_BACKUP_LOCATION)_$(SUFIX)/db/ $(HOST_BACKUP_LOCATION)/db/ && \
cp -rf $(HOST_BACKUP_LOCATION)_$(SUFIX)/solr $(HOST_BACKUP_LOCATION)/solr && \
$(sudo) chmod -R 777 $(HOST_BACKUP_LOCATION) && \
$(dc-backup) up -d postgres && sleep 30 && \
$(dc-backup) exec postgres bash -c 'psql --dbname=postgresql://alfresco:alfresco@127.0.0.1:5432/alfresco < /backup/db/alfresco.pg' && \
$(dc-backup) up -d
all: show-config ## 0 - executes the entire backup process
# perform the backup and waits until the server is starting
# do some change on backed up data
# then restore from backup and check the content is restored as expected
make backup-prepare wait && \
make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-backup-suite.xml
make backup-perform wait && \
make run-mvn-tests suiteXmlFile=./src/test/resources/search-on-backup-suite.xml
make backup-restore wait && \
make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-backup-suite.xml
show-config: ## show compose configuration
$(dc-backup) config
clean: ## kill containers, remove volumes and data
$(dc-backup) kill && $(dc-backup) rm -fv
$(sudo) rm -rf $(HOST_BACKUP_LOCATION)
tail-logs: ## tails all container logs
$(dc-backup) logs -f

View File

@@ -1,49 +0,0 @@
# About
Testing the Backup of SearchService product
**Build Plan:** https://bamboo.alfresco.com/bamboo/browse/SAD-QAB
![](docs/backup.png?raw=true)
# Steps
* **a)** prepare the backup
```shel
make backup-prepare wait
```
>more details on Makefile [task](Makefile#L27).
* **b)** create some data manually or using automated tests found on this project
```shel
make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-backup-suite.xml
```
* **c)** perform the backup of data
```shel
make backup-perform wait
```
* **d)** now you can also update the data/remove it from TS, or even remove the entire volumes
```shel
make run-mvn-tests suiteXmlFile=./src/test/resources/search-on-backup-suite.xml
# or
make clean
```
* **e)** at any time you can restore the backup
```shel
make backup-restore wait
```
* **f)** now you can check the data from point **b)** is corectly recovered
```shel
make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-backup-suite.xml
```
# All in one
At any time you can run the `make all` taks that will execute all the above commands for you
```shel
make all
```
# Environment Settings
Pay attention at the values that exist in [.env](.env) file. These settings will be picked up in custom docker-compose.*.yml file(s)

View File

@@ -1,39 +0,0 @@
version: '3'
services:
alfresco:
environment:
JAVA_OPTS : "
-Ddb.driver=org.postgresql.Driver
-Ddb.username=alfresco
-Ddb.password=alfresco
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=search
-Dsolr.port=8983
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
-Ddeployment.method=DOCKER_COMPOSE
-Dcsrf.filter.enabled=false
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
-Dsolr.backup.alfresco.remoteBackupLocation=/backup/solr/alfresco/
-Dsolr.backup.alfresco.numberToKeep=1
-Dsolr.backup.archive.remoteBackupLocation=/backup/solr/archive/
-Dsolr.backup.archive.numberToKeep=1"
volumes:
- ${HOST_BACKUP_LOCATION}/alf_data:/usr/local/tomcat/alf_data
search:
environment:
- VERSION=${SEARCH_TAG}
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
volumes:
- ${HOST_BACKUP_LOCATION}/solr:/backup/solr
postgres:
environment:
- POSTGRES_PASSWORD=alfresco
- POSTGRES_USER=alfresco
- POSTGRES_DB=alfresco
volumes:
- ${HOST_BACKUP_LOCATION}/db:/backup/db

Binary file not shown (deleted image, 258 KiB).

View File

@@ -1,11 +0,0 @@
# docker-compose related environments
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
ALFRESCO_TAG=6.1.0-EA3
SHARE_IMAGE=alfresco/alfresco-share
SHARE_TAG=6.0
POSTGRES_IMAGE=postgres
POSTGRES_TAG=10.1
SEARCH_IMAGE=quay.io/alfresco/search-services
SEARCH_TAG=latest
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
ACTIVEMQ_TAG=5.15.6

View File

@@ -1,16 +0,0 @@
ARG SEARCH_TAG=latest
FROM quay.io/alfresco/search-services:$SEARCH_TAG
LABEL creator="Paul Brodner" maintainer="Alfresco Search Services Team"
ARG SCRIPTS_FOLDER=
USER root
RUN echo " &" >> $DIST_DIR/solr/bin/search_config_setup.sh && \
echo "bash -c \"find $DIST_DIR/scripts/ -maxdepth 1 -type f -executable -name '*.sh' -exec {} \\;\"" >> $DIST_DIR/solr/bin/search_config_setup.sh && \
echo "bash -c \"tail -f $DIST_DIR/logs/solr.log\"" >> $DIST_DIR/solr/bin/search_config_setup.sh
USER solr
COPY ${SCRIPTS_FOLDER}/* ${DIST_DIR}/scripts/
# we need this, because we tail on it in the search_config_setup.sh (see above)
RUN touch ./logs/solr.log

View File

@@ -1,21 +0,0 @@
include ../../Makefile
include .env
# CURRENT_DIR is the folder where this Makefile is saved
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
ifeq ($(dc-custom),)
dc-custom:=$(dc) -f ../docker-compose.yml -f docker-compose.custom.yml
endif
## ---- CUSTOM
build: ## 1 - build a custom image: $ make SCRIPTS_FOLDER=spellcheck build
ifndef SCRIPTS_FOLDER
@echo SCRIPTS_FOLDER not defined "Usage: make SCRIPTS_FOLDER=spellcheck build"
exit 1
endif
$(dc-custom) build --force-rm --no-cache --pull --build-arg SCRIPTS_FOLDER=$(SCRIPTS_FOLDER)
start: ## 2 - starts the custom image built: $ make start
$(dc-custom) up -d && make wait

View File

@@ -1,25 +0,0 @@
# About
Start Search Service with a custom configuration
# Steps
* **a)** under `custom` folder create a new folder that will hold all settings
>checkout [spellcheck](.spellcheck) folder for example
>add here any shell scripts that will enable/disable a particular setting
* **b)** build the new image setting SCRIPTS_FOLDER to you folder already created
```shel
make SCRIPTS_FOLDER=spellcheck build
```
>notice that out [docker-compose.custom.yml](.custom/docker-compose.custom.yml) file is using a [Dockerfile](.custom/Dockerfile) to built you new image.
> at runtime, all shell scripts from your folder are executed and the settings are applied.
* **c)** the image is built locally, now start it up
```shel
make start
```
# Environment Settings
Pay attention at the values that exist in [.env](.env) file. These settings will be picked up in custom docker-compose.*.yml file(s)

View File

@@ -1,9 +0,0 @@
version: '3'
services:
search:
build:
context: ./custom
dockerfile: Dockerfile
image: quay.io/alfresco/search-services-custom:${SEARCH_TAG}
volumes:
- .:/backup

View File

@@ -1,19 +0,0 @@
#!/usr/bin/env bash
set -ex
echo "Enabling SpellCheck"
cat <<EOF >> /opt/alfresco-search-services/solrhome/conf/shared.properties
# Enabling SpellCheck
# configuration:
# * http://docs.alfresco.com/6.0/concepts/solr-shared-properties.html
# * https://docs.alfresco.com/5.2/tasks/solr6-install-withoutSSL.html
# test it: http://docs.alfresco.com/6.0/concepts/search-api-spellcheck.html
# Suggestable Properties
alfresco.suggestable.property.0={http://www.alfresco.org/model/content/1.0}name
alfresco.suggestable.property.1={http://www.alfresco.org/model/content/1.0}title
alfresco.suggestable.property.2={http://www.alfresco.org/model/content/1.0}description
alfresco.suggestable.property.3={http://www.alfresco.org/model/content/1.0}content
EOF

View File

@@ -1,63 +0,0 @@
version: '3'
services:
alfresco:
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
environment:
JAVA_OPTS : "
-Ddb.driver=org.postgresql.Driver
-Ddb.username=alfresco
-Ddb.password=alfresco
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=search
-Dsolr.port=8983
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
-Ddeployment.method=DOCKER_COMPOSE
-Dcsrf.filter.enabled=false
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
"
ports:
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
- "5005:5005" #Java debugging
- "8081:8080" #Browser port for Alfresco
share:
image: ${SHARE_IMAGE}:${SHARE_TAG}
environment:
- REPO_HOST=alfresco
- REPO_PORT=8080
ports:
- 8082:8080 #Browser port for Share
postgres:
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
environment:
- POSTGRES_PASSWORD=alfresco
- POSTGRES_USER=alfresco
- POSTGRES_DB=alfresco
ports:
- 5432:5432
search:
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
environment:
#Solr needs to know how to register itself with Alfresco
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
#Alfresco needs to know how to call solr
- SOLR_SOLR_HOST=search
- SOLR_SOLR_PORT=8983
#Create the default alfresco and archive cores
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
ports:
- 8083:8983 #Browser port
activemq:
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
ports:
- 8161:8161 # Web Console
- 5672:5672 # AMQP
- 61616:61616 # OpenWire
- 61613:61613 # STOMP

View File

@@ -1,11 +0,0 @@
# docker-compose related environments
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
ALFRESCO_TAG=6.1.0-EA3
SHARE_IMAGE=alfresco/alfresco-share
SHARE_TAG=6.0
POSTGRES_IMAGE=postgres
POSTGRES_TAG=10.1
SEARCH_IMAGE=quay.io/alfresco/search-services
SEARCH_TAG=latest
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
ACTIVEMQ_TAG=5.15.6

View File

@@ -1,22 +0,0 @@
# About
Start Alfresco services and scale SOLR to multiple instances, behind a LB.
# Steps
* **a)** Start Alfresco
```
docker-compose up -d
```
* **b)** Scale SOLR to 2 instances
```
docker-compose scale solr=2
```
>it's possible at this time to restart `alfresco` service if there are not results returned by LB
```
docker-compose restart alfresco
```

View File

@@ -1,72 +0,0 @@
version: '3'
services:
alfresco:
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
environment:
JAVA_OPTS : "
-Ddb.driver=org.postgresql.Driver
-Ddb.username=alfresco
-Ddb.password=alfresco
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=search
-Dsolr.port=80
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
-Ddeployment.method=DOCKER_COMPOSE
-Dcsrf.filter.enabled=false
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
"
ports:
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
- "5005:5005" #Java debugging
- "8081:8080" #Browser port for Alfresco
share:
image: ${SHARE_IMAGE}:${SHARE_TAG}
environment:
- REPO_HOST=alfresco
- REPO_PORT=8080
ports:
- 8082:8080 #Browser port for Share
postgres:
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
environment:
- POSTGRES_PASSWORD=alfresco
- POSTGRES_USER=alfresco
- POSTGRES_DB=alfresco
ports:
- 5432:5432
search:
image: dockercloud/haproxy
links:
- solr
ports:
- 8083:80 #Browser port
volumes:
- /var/run/docker.sock:/var/run/docker.sock
solr:
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
environment:
#Solr needs to know how to register itself with Alfresco
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
#Alfresco needs to know how to call solr
- SOLR_SOLR_HOST=search
- SOLR_SOLR_PORT=8983
#Create the default alfresco and archive cores
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
ports:
- 8983 #Browser port
activemq:
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
ports:
- 8161:8161 # Web Console
- 5672:5672 # AMQP
- 61616:61616 # OpenWire
- 61613:61613 # STOMP

View File

@@ -1,11 +0,0 @@
# docker-compose related environments
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
ALFRESCO_TAG=6.1.0-EA3
SHARE_IMAGE=alfresco/alfresco-share
SHARE_TAG=6.0
POSTGRES_IMAGE=postgres
POSTGRES_TAG=10.1
SEARCH_IMAGE=quay.io/alfresco/search-services
SEARCH_TAG=latest
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
ACTIVEMQ_TAG=5.15.6

View File

@@ -1,41 +0,0 @@
include ../../Makefile
include .env
# CURRENT_DIR is the folder where this Makefile is saved
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
ifeq ($(dc-upgrade),)
dc-upgrade:=$(dc) -f ../docker-compose.yml -f docker-compose.upgrade.yml
endif
## ---- UPGRADE
set_version ?=latest # default version that will be used in tasks
as-previous: clean ## 1 - install the previous version: $ make set_version=1.2.1 as-previous
rm -rf ./solr-data && \
rm -rf ./solr-contentstore && \
rm -f ./image-digests.txt
export SEARCH_TAG=$(set_version) && \
$(dc-upgrade) pull && \
echo "\n====Previous====" > image-digests.txt && \
$(dc-upgrade) config --resolve-image-digests >> image-digests.txt && \
$(dc-upgrade) up -d && \
docker ps
as-current: ## 2 - upgrade previous to this version $ make set_version=2.0.x as-current
$(dc-upgrade) kill search && \
$(dc-upgrade) rm -f search && \
export SEARCH_TAG=$(set_version) && \
$(dc-upgrade) pull search && \
echo "\n====Current====" >> image-digests.txt && \
$(dc-upgrade) config --resolve-image-digests >> image-digests.txt && \
$(dc-upgrade) up -d search && \
docker ps
#
# Run the following commands if you need to test the upgrade e2e
#
#make set_version=1.2.1 as-previous wait
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-upgrade-suite.xml
#make set_version=2.0.x as-current wait
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-upgrade-suite.xml

View File

@@ -1,31 +0,0 @@
# About
Testing the Upgrade of SearchService product
**Build Plan:** https://bamboo.alfresco.com/bamboo/browse/SAD-QAUP
![](docs/upgrade.png?raw=true)
# Steps
* **a)** start the initial version
```shel
make set_version=1.2.1 as-previous wait
```
>notice that new folders will appear on you "upgrade" folder with data from container(s)
* **b)** create some data manually or using automated tests found on this project
```shel
make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-upgrade-suite.xml
```
* **c)** now upgrade to new version
```shel
make set_version=2.0.x as-current wait
```
* **d)** and test that upgrade data exist
```shel
make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-upgrade-suite.xml
```
# Environment Settings
Pay attention at the values that exist in [.env](.env) file. These settings will be picked up in custom docker-compose.*.yml file(s)

View File

@@ -1,10 +0,0 @@
version: '3'
services:
search:
environment:
- VERSION=${SEARCH_TAG}
image: quay.io/alfresco/search-services:${SEARCH_TAG}
volumes:
- "./upgrade/solr-data:/opt/alfresco-search-services/data"
- "./upgrade/solr-contentstore:/opt/alfresco-search-services/contentstore"

Binary file not shown (deleted image, 216 KiB).

View File

@@ -1,15 +0,0 @@
# docker-compose related environments
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
ALFRESCO_TAG=6.1.0-EA3
SHARE_IMAGE=alfresco/alfresco-share
SHARE_TAG=6.0
POSTGRES_IMAGE=postgres
POSTGRES_TAG=10.1
SEARCH_IMAGE=quay.io/alfresco/search-services
SEARCH_TAG=latest
DIST_DIR_PATH=/opt/alfresco-search-services
#SEARCH_IMAGE=quay.io/alfresco/insight-engine
#SEARCH_TAG=lates
#DIST_DIR_PATH=/opt/alfresco-insight-engine
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
ACTIVEMQ_TAG=5.15.6

View File

@@ -1,39 +0,0 @@
include ../Makefile
include .env
# CURRENT_DIR is the folder where this Makefile is saved
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
SEARCH_IMAGE ?=quay.io/alfresco/search-services
SEARCH_TAG ?=latest # default version that will be used in tasks
as-previous: clean ## 1 - install the previous version: $ make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=1.2.1 as-previous
rm -rf ./solr-data && \
rm -rf ./solr-contentstore && \
rm -f ./image-digests.txt && \
export SEARCH_TAG=$(SEARCH_TAG) && \
export SEARCH_IMAGE=$(SEARCH_IMAGE) && \
$(dc) pull && \
echo "\n====Previous====" > image-digests.txt && \
$(dc) config --resolve-image-digests >> image-digests.txt && \
$(dc) up -d && \
docker ps
as-current: ## 2 - upgrade previous to this version $ make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=2.0.x as-current
$(dc) kill search && \
$(dc) rm -f search && \
export SEARCH_TAG=$(SEARCH_TAG) && \
export SEARCH_IMAGE=$(SEARCH_IMAGE) && \
$(dc) pull search && \
echo "\n====Current====" >> image-digests.txt && \
$(dc) config --resolve-image-digests >> image-digests.txt && \
$(dc) up -d search && \
docker ps
#
# Run the following commands if you need to test the upgrade e2e
#
#make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=1.2.1 as-previous wait
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-upgrade-suite.xml
#make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=2.0.x as-current wait
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-upgrade-suite.xml

View File

@@ -1,66 +0,0 @@
version: '3'
services:
alfresco:
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
environment:
JAVA_OPTS : "
-Ddb.driver=org.postgresql.Driver
-Ddb.username=alfresco
-Ddb.password=alfresco
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
-Dsolr.host=search
-Dsolr.port=8983
-Dsolr.secureComms=none
-Dsolr.base.url=/solr
-Dindex.subsystem.name=solr6
-Dalfresco.restApi.basicAuthScheme=true
-Ddeployment.method=DOCKER_COMPOSE
-Dcsrf.filter.enabled=false
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
"
ports:
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
- "5005:5005" #Java debugging
- "8081:8080" #Browser port for Alfresco
share:
image: ${SHARE_IMAGE}:${SHARE_TAG}
environment:
- REPO_HOST=alfresco
- REPO_PORT=8080
ports:
- 8082:8080 #Browser port for Share
postgres:
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
environment:
- POSTGRES_PASSWORD=alfresco
- POSTGRES_USER=alfresco
- POSTGRES_DB=alfresco
ports:
- 5432:5432
search:
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
environment:
#Solr needs to know how to register itself with Alfresco
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
#Alfresco needs to know how to call solr
- SOLR_SOLR_HOST=search
- SOLR_SOLR_PORT=8983
#Create the default alfresco and archive cores
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
ports:
- 8083:8983 #Browser port
volumes:
- "./upgrade/solr-data:${DIST_DIR_PATH}/data"
- "./upgrade/solr-contentstore:/opt/${DIST_DIR_PATH}/contentstore"
activemq:
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
ports:
- 8161:8161 # Web Console
- 5672:5672 # AMQP
- 61616:61616 # OpenWire
- 61613:61613 # STOMP

View File

@@ -0,0 +1,102 @@
/*
* #%L
* Alfresco Search Services E2E Test
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.test.search.functional.searchServices.solr.admin;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
import java.util.Random;
import java.util.UUID;
import java.util.stream.IntStream;
import org.alfresco.rest.core.RestResponse;
import org.alfresco.test.search.functional.AbstractE2EFunctionalTest;
import org.alfresco.utility.Utility;
import org.alfresco.utility.model.FileModel;
import org.alfresco.utility.model.FileType;
import org.testng.annotations.Test;
/** E2E tests for the SUMMARY admin report. */
public class SolrE2ESummaryTest extends AbstractE2EFunctionalTest
{
/** The maximum time to wait for a report to update (in ms). */
private static final int MAX_TIME = 60 * 1000;
/** The frequency to check the report (in ms). */
private static final int RETRY_INTERVAL = 100;
/** Check the FTS section of the admin summary contains the expected fields. */
@Test
public void testFTSReport() throws Exception
{
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
int toUpdate = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content needs to be updated'");
assertTrue(toUpdate >= 0, "Expecting non-negative pieces of content to need updating.");
int inSync = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content is in sync'");
assertTrue(inSync >= 0, "Expecting non-negative pieces of content to need updating.");
}
/** Check that we can spot a document updating by using the SUMMARY report. */
@Test
public void testFTSReport_contentUpdate() throws Exception
{
RestResponse response2 = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
int previousInSync = response2.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content is in sync'");
FileModel file = new FileModel("file.txt", "file.txt", "", FileType.TEXT_PLAIN, "file.txt");
FileModel content = dataContent.usingUser(adminUserModel).usingSite(testSite).createContent(file);
// Wait for the number of "in-sync" documents to increase (i.e. when the document is indexed).
Utility.sleep(RETRY_INTERVAL, MAX_TIME, () -> {
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
int inSync = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content is in sync'");
assertTrue(inSync > previousInSync, "Expected a document to be indexed.");
});
// Wait for the number of outdated documents to become zero.
Utility.sleep(RETRY_INTERVAL, MAX_TIME, () ->
{
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
int toUpdate = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content needs to be updated'");
assertEquals(toUpdate, 0, "Expected number of outdated documents to drop to zero.");
});
// Update the document's content with a large amount of text.
StringBuilder largeText = new StringBuilder("Big update");
IntStream.range(0, 100000).forEach((i) -> largeText.append(" ").append(UUID.randomUUID().toString()));
dataContent.usingUser(adminUserModel).usingResource(content).updateContent(largeText.toString());
// Expect to spot the number of outdated documents increase beyond zero.
Utility.sleep(RETRY_INTERVAL, MAX_TIME, () ->
{
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
int toUpdate = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content needs to be updated'");
assertNotEquals(toUpdate, 0, "Expected number of outdated documents to be greater than zero.");
});
}
}

View File

@@ -19,6 +19,7 @@ rest.rmPath=alfresco/api/-default-/public/gs/versions/1
solr.scheme=http
solr.server=localhost
solr.port=8083
solr.secret=secret
#Solr Indexing Time
# Use 1s and 60 attempts, see AbstractE2EFunctionalTest.SEARCH_MAX_ATTEMPTS

pom.xml
View File

@@ -7,7 +7,7 @@
<version>12</version>
</parent>
<artifactId>alfresco-search-and-insight-parent</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
<packaging>pom</packaging>
<name>Alfresco Search And Insight Parent</name>
<distributionManagement>
@@ -20,26 +20,139 @@
<url>https://artifacts.alfresco.com/nexus/content/repositories/enterprise-snapshots/</url>
</snapshotRepository>
</distributionManagement>
<scm>
<repositories>
<repository>
<id>atlassian</id>
<url>https://packages.atlassian.com/maven-3rdparty/</url>
<snapshots>
<enabled>false</enabled>
<checksumPolicy>warn</checksumPolicy>
</snapshots>
<releases>
<enabled>true</enabled>
<checksumPolicy>warn</checksumPolicy>
</releases>
</repository>
</repositories>
<scm>
<connection>scm:git:https://github.com/Alfresco/InsightEngine.git</connection>
<developerConnection>scm:git:https://github.com/Alfresco/InsightEngine.git</developerConnection>
<url>https://github.com/Alfresco/InsightEngine</url>
<tag>2.0.2</tag>
<tag>2.0.3</tag>
</scm>
<properties>
<maven.build.sourceVersion>11</maven.build.sourceVersion>
<solr.base.version>6.6.5</solr.base.version>
<solr.version>${solr.base.version}-patched.6</solr.version>
<solr.version>${solr.base.version}-patched.9</solr.version>
<!-- The location to download the solr zip file from. -->
<!-- <solr.zip>https://archive.apache.org/dist/lucene/solr/${solr.version}/solr-${solr.version}.zip</solr.zip> -->
<!-- Solr startup scripts do not work with any Java version higher than 9 so the scripts have been patched -->
<solr.zip>https://artifacts.alfresco.com/nexus/content/repositories/public/org/apache/solr/solr/solr-${solr.version}/solr-solr-${solr.version}.zip</solr.zip>
<solr.directory>${project.build.directory}/solr-${solr.version}</solr.directory>
<license-maven-plugin.version>2.0.0</license-maven-plugin.version>
<license-maven-plugin.version>2.0.1.alfresco-1</license-maven-plugin.version>
<licenseName>enterprise</licenseName>
<license.update.dryrun>true</license.update.dryrun>
<license.update.copyright>false</license.update.copyright>
<dependency.apache-commons-compress.version>1.21</dependency.apache-commons-compress.version>
<dependency.jakarta.xml.bind-api.version>3.0.1</dependency.jakarta.xml.bind-api.version>
<dependency.tika.version>1.27</dependency.tika.version>
<dependency.hadoop.version>2.7.7</dependency.hadoop.version>
<dependency.restlet.version>2.3.12</dependency.restlet.version>
<dependency.jdom2.version>2.0.6.1</dependency.jdom2.version>
<dependency.spring.version>5.3.7</dependency.spring.version>
<dependency.httpclient.version>4.5.13</dependency.httpclient.version>
<dependency.codehaus.jackson.version>1.9.14-atlassian-6</dependency.codehaus.jackson.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>${dependency.apache-commons-compress.version}</version>
</dependency>
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
<version>${dependency.tika.version}</version>
</dependency>
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-java7</artifactId>
<version>${dependency.tika.version}</version>
</dependency>
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers</artifactId>
<version>${dependency.tika.version}</version>
</dependency>
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-xmp</artifactId>
<version>${dependency.tika.version}</version>
</dependency>
<dependency>
<groupId>jakarta.xml.bind</groupId>
<artifactId>jakarta.xml.bind-api</artifactId>
<version>${dependency.jakarta.xml.bind-api.version}</version>
</dependency>
<dependency>
<groupId>com.adobe.xmp</groupId>
<artifactId>xmpcore</artifactId>
<version>6.1.11</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${dependency.hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${dependency.hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${dependency.hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${dependency.hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.restlet.jee</groupId>
<artifactId>org.restlet</artifactId>
<version>${dependency.restlet.version}</version>
</dependency>
<dependency>
<groupId>org.restlet.jee</groupId>
<artifactId>org.restlet.ext.servlet</artifactId>
<version>${dependency.restlet.version}</version>
</dependency>
<dependency>
<groupId>org.jdom</groupId>
<artifactId>jdom2</artifactId>
<version>${dependency.jdom2.version}</version>
</dependency>
<!-- spring framework is defined in "search-services" and "insight-engine" because "e2e-test" uses different versions -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${dependency.httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>${dependency.codehaus.jackson.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>${dependency.codehaus.jackson.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<modules>
<module>search-services</module>
<module>insight-engine</module>

View File

@@ -158,10 +158,10 @@ $ unzip alfresco-search-services-*.zip
$ cd alfresco-search-services
```
Change default Alfresco Communication protocol to `none`.
Change default Alfresco Communication protocol to `none`, and set `alfresco.allowUnauthenticatedSolrEndpoint` to `true`:
```bash
$ sed -i 's/alfresco.secureComms=https/alfresco.secureComms=none/' solrhome/templates/rerank/conf/solrcore.properties
$ sed -i 's/alfresco.secureComms=https/alfresco.secureComms=none\nalfresco.allowUnauthenticatedSolrEndpoint=true/' solrhome/templates/rerank/conf/solrcore.properties
```
*Note* Above line is written in GNU sed, you can use `gsed` from Mac OS X or just edit the file with a Text Editor.
@@ -293,8 +293,8 @@ The following environment variables are supported:
| SEARCH_LOG_LEVEL | ERROR, WARN, INFO, DEBUG or TRACE | The root logger level. |
| ENABLE_SPELLCHECK | true or false | Whether spellchecking is enabled or not. |
| DISABLE_CASCADE_TRACKING | true or false | Whether cascade tracking is enabled or not. Disabling cascade tracking will improve performance, but result in some feature loss (e.g. path queries). |
| ALFRESCO_SECURE_COMMS | https or none | Whether communication with the repository is secured. See below. |
| SOLR_SSL_... | --- | These variables are also used to configure SSL. See below. |
| ALFRESCO_SECURE_COMMS | secret or https | This property instructs Solr if it should enable Shared Secret authentication or mTLS authentication with HTTPS. See below. |
**Using Mutual Auth TLS (SSL)**
@@ -328,20 +328,39 @@ SOLR Web Console will be available at:
*Note* You must install the `browser.p12` certificate in your browser in order to access to this URL.
**Using Plain HTTP**
**Using Shared Secret Authentication**
By default Docker image is using SSL, so it's required to add an environment variable `ALFRESCO_SECURE_COMMS=none` to use SOLR in plain HTTP mode.
An alternative is to use a shared secret in order to secure repo <-> solr communication. You just need to set `ALFRESCO_SECURE_COMMS=secret` **AND** `JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret"`.
By default, the SOLR Web Console will be available at:
[http://localhost:8983/solr](http://localhost:8983/solr)
but you can also start the Jetty server in SSL mode as explained above, in that case the SOLR Web Console will be available at:
[https://localhost:8983/solr](https://localhost:8983/solr)
*Note* You must install the `browser.p12` certificate in your browser in order to access to this URL.
In both cases, when trying to access the SOLR Web Console you will have to provide the `X-Alfresco-Search-Secret` header in the request, specifying as its value the same value that was used for the `-Dalfresco.secureComms.secret` property.
You can do so natively on Safari through the `Dev Tools > Local Overrides` feature, or with a browser extension on Google Chrome/Firefox/Opera/Edge: [ModHeader](https://modheader.com/).
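For reference, a minimal sketch of such a request from the command line, assuming the example secret `my_super_secret_secret` used with `-Dalfresco.secureComms.secret` above and the default port mapping; the `admin/cores?action=SUMMARY` path is the standard Search Services admin report endpoint:
```bash
# Pass the shared secret in the X-Alfresco-Search-Secret header; its value must match
# the -Dalfresco.secureComms.secret JVM property configured on the Solr side.
curl -H "X-Alfresco-Search-Secret: my_super_secret_secret" \
     "http://localhost:8983/solr/admin/cores?action=SUMMARY&core=alfresco&wt=json"
```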
**Using Shared Secret Authentication**
By default Docker image is using SSL, so it's required to add an environment variable `ALFRESCO_SECURE_COMMS=secret` AND `JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret"` to use SOLR with Shared Secret authentication.
To run the docker image:
```bash
$ docker run -p 8983:8983 -e ALFRESCO_SECURE_COMMS=none -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive searchservices:develop
$ docker run -p 8983:8983 -e ALFRESCO_SECURE_COMMS=secret -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive -e JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret" searchservices:develop
```
SOLR Web Console will be available at:
[http://localhost:8983/solr](http://localhost:8983/solr)
You will have to provide the `X-Alfresco-Search-Secret` header in the request, specifying as its value the same value that was used for the `-Dalfresco.secureComms.secret` property.
**Enabling YourKit Java Profiler**
This Docker Image includes [YourKit Java Profiler](https://www.yourkit.com/java/profiler/) server service. In order to enable this service, so the SOLR JVM can be inspected with the YourKit local program, additional configuration is required to set the YourKit `agentpath`. Mapping the exposed profiling port (10001 by default) is also required.
@@ -369,13 +388,16 @@ solr6:
SOLR_SOLR_HOST: "solr6"
SOLR_SOLR_PORT: "8983"
# HTTP settings
ALFRESCO_SECURE_COMMS: "none"
ALFRESCO_SECURE_COMMS: "secret"
#Create the default alfresco and archive cores
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
SOLR_JAVA_MEM: "-Xms2g -Xmx2g"
SOLR_OPTS: "
-agentpath:/usr/local/YourKit-JavaProfiler-2019.8/bin/linux-x86-64/libyjpagent.so=port=10001,listen=all
"
JAVA_TOOL_OPTIONS: "
-Dalfresco.secureComms.secret=my_super_secret_secret
"
ports:
- 8083:8983 #Browser port
- 10001:10001 #YourKit port
@@ -400,7 +422,7 @@ During deployment time whenever Search Services or Insight Engine image starts,
To run the docker image:
```bash
$ docker run -p 8984:8983 -e REPLICATION_TYPE=slave -e ALFRESCO_SECURE_COMMS=none -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive searchservices:develop
$ docker run -p 8984:8983 -e REPLICATION_TYPE=slave -e ALFRESCO_SECURE_COMMS=secret -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive -e JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret" searchservices:develop
```
Solr-slave End point: [http://localhost:8984/solr](http://localhost:8984/solr)
@@ -408,7 +430,7 @@ To generate your own Docker-compose file please follow [generator-alfresco-docke
### Use Alfresco Search Services Docker Image with Docker Compose
Sample configuration in a Docker Compose file using **Plain HTTP** protocol to communicate with Alfresco Repository.
Sample configuration in a Docker Compose file using **Shared Secret Authentication** to communicate with Alfresco Repository.
```
solr6:
@@ -422,10 +444,13 @@ solr6:
SOLR_SOLR_HOST: "solr6"
SOLR_SOLR_PORT: "8983"
# HTTP settings
ALFRESCO_SECURE_COMMS: "none"
ALFRESCO_SECURE_COMMS: "secret"
#Create the default alfresco and archive cores
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
SOLR_JAVA_MEM: "-Xms2g -Xmx2g"
JAVA_TOOL_OPTIONS: "
-Dalfresco.secureComms.secret=my_super_secret_secret
"
ports:
- 8083:8983 #Browser port
```
@@ -434,6 +459,8 @@ SOLR Web Console will be available at:
[http://localhost:8983/solr](http://localhost:8983/solr)
You will have to provide the `X-Alfresco-Search-Secret` header in the request, specifying as its value the same value that was used for the `-Dalfresco.secureComms.secret` property.
Sample configuration in a Docker Compose file using **Mutual Auth TLS (SSL)** protocol to communicate with Alfresco Repository.

View File

@@ -6,7 +6,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-search-parent</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -15,12 +15,16 @@
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-solrclient-lib</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
<exclusions>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -42,6 +46,10 @@
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -50,6 +58,12 @@
<artifactId>solr-analysis-extras</artifactId>
<version>${solr.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -74,6 +88,10 @@
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -82,6 +100,23 @@
<artifactId>solr-clustering</artifactId>
<version>${solr.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.simpleframework</groupId>
<artifactId>simple-xml</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- replace simple-xml from solr-clustering with simple-xml-safe -->
<dependency>
<groupId>com.carrotsearch.thirdparty</groupId>
<artifactId>simple-xml-safe</artifactId>
<version>2.7.1</version>
<scope>provided</scope>
</dependency>
<dependency>
@@ -92,7 +127,7 @@
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<artifactId>slf4j-reload4j</artifactId>
<version>${slf4j.version}</version>
</dependency>
@@ -169,6 +204,12 @@
<version>1.1.4c</version>
</dependency>
<dependency>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-xmlfactory</artifactId>
<version>1.3</version>
</dependency>
<!-- DATE Functions (YEAR, MONTH, ...) are broken in Calcite 1.11.0 (default
version provided by SOLR 6.6.x)
Upgrading manually Calcite version to 1.15.0
@@ -188,6 +229,10 @@
<artifactId>avatica-core</artifactId>
<version>1.13.0</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<!-- Test dependencies -->
<dependency>
@@ -209,6 +254,12 @@
<artifactId>solr-test-framework</artifactId>
<version>${solr.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.carrotsearch.randomizedtesting</groupId>

View File

@@ -45,7 +45,6 @@ import org.alfresco.solr.tracker.TrackerRegistry;
import org.alfresco.solr.utils.Utils;
import org.alfresco.util.Pair;
import org.alfresco.util.shard.ExplicitShardingPolicy;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.io.FileUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CoreAdminParams;
@@ -314,7 +313,6 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
AlfrescoSolrDataModel.getInstance().close();
SOLRAPIClientFactory.close();
MultiThreadedHttpConnectionManager.shutdownAll();
coreNames().forEach(trackerRegistry::removeTrackersForCore);
informationServers.clear();

View File

@@ -502,7 +502,10 @@ class HandlerReportHelper
coreSummary.add("ContentTracker Enabled", contentTrkr.isEnabled());
coreSummary.add("MetadataTracker Enabled", metaTrkr.isEnabled());
coreSummary.add("AclTracker Enabled", aclTrkr.isEnabled());
coreSummary.add("CascadeTracker Enabled", cascadeTracker.isEnabled());
if (cascadeTracker != null)
{
coreSummary.add("CascadeTracker Enabled", cascadeTracker.isEnabled());
}
// TX

View File

@@ -373,11 +373,11 @@ public class SolrInformationServer implements InformationServer
public static final String AND = " AND ";
public static final String OR = " OR ";
private static final String REQUEST_HANDLER_NATIVE = "/native";
static final String REQUEST_HANDLER_NATIVE = "/native";
static final String REQUEST_HANDLER_GET = "/get";
static final String RESPONSE_DEFAULT_ID = "doc";
private static final String RESPONSE_DEFAULT_IDS = "response";
static final String RESPONSE_DEFAULT_IDS = "response";
static final String PREFIX_ERROR = "ERROR-";
@@ -712,11 +712,10 @@ public class SolrInformationServer implements InformationServer
{
ModifiableSolrParams params =
new ModifiableSolrParams(request.getParams())
.set(CommonParams.Q, "*:*")
.set(CommonParams.FQ, FIELD_DOC_TYPE + ":" + DOC_TYPE_NODE)
.set(CommonParams.Q, FIELD_DOC_TYPE + ":" + DOC_TYPE_NODE + " AND " + FIELD_TYPE + ":\"{http://www.alfresco.org/model/content/1.0}content\"")
.set(CommonParams.ROWS, 0)
.set(FacetParams.FACET, true)
.add(FacetParams.FACET_QUERY, "{!key='OUTDATED'}LATEST_APPLIED_CONTENT_VERSION_ID:{-10 TO -10}");
.set(FacetParams.FACET, "on")
.add(FacetParams.FACET_QUERY, "{!key='OUTDATED'}" + LAST_INCOMING_CONTENT_VERSION_ID + ":\"" + CONTENT_OUTDATED_MARKER + "\"");
SolrQueryResponse response = cloud.getResponse(nativeRequestHandler, request, params);
@@ -1720,7 +1719,11 @@ public class SolrInformationServer implements InformationServer
Optional<Collection<NodeMetaData>> nodeMetaDatas = getNodesMetaDataFromRepository(nmdp);
if (nodeMetaDatas.isEmpty() || nodeMetaDatas.get().isEmpty()) return;
if (nodeMetaDatas.isEmpty() || nodeMetaDatas.get().isEmpty())
{
// Using exception for flow handling to jump to error node processing.
throw new Exception("Error loading node metadata from repository.");
}
NodeMetaData nodeMetaData = nodeMetaDatas.get().iterator().next();
if (node.getTxnId() == Long.MAX_VALUE)
@@ -1753,7 +1756,7 @@ public class SolrInformationServer implements InformationServer
: null);
// UnindexedNodes are not indexed when solrcore property flag "recordUnindexedNodes" is set to false
if (addDocCmd != null)
if (addDocCmd.solrDoc != null)
{
processor.processAdd(addDocCmd);
}
@@ -2060,7 +2063,7 @@ public class SolrInformationServer implements InformationServer
if (nodesMetaDataFromRepository.isEmpty())
{
// Using exception for flow handling to jump to single node processing.
throw new Exception("Error loading node metadata from repository for bulk delete.");
throw new Exception("Error loading node metadata from repository for bulk update.");
}
NEXT_NODE:
@@ -2124,7 +2127,7 @@ public class SolrInformationServer implements InformationServer
}
catch (Exception e)
{
LOGGER.error(" Bulk indexing failed, do one node at a time. See the stacktrace below for further details.", e);
LOGGER.error("Bulk indexing failed, do one node at a time. See the stacktrace below for further details.", e);
for (Node node : nodes)
{
this.indexNode(node, true);
@@ -2625,7 +2628,7 @@ public class SolrInformationServer implements InformationServer
* - Do we have an entry in the local content store for the document X?
* - If we didn't have that entry then FTSSTATUS was set to New
*
* FTSSTatus = DIRTY
* FTSSTATUS = DIRTY
*
* When a node, which requires content, arrived here for being indexed, the "Dirty" status was set if
*
@@ -2637,7 +2640,7 @@ public class SolrInformationServer implements InformationServer
* something is different between the local document and the document which is in Solr".
*
* The content store allowed to workaround the problem: the code was deserializing the entry corresponding to the
* incoming node (which teoretically corresponded to the document indexed in Solr) so both versions were available
* incoming node (which theoretically corresponded to the document indexed in Solr) so both versions were available
* for making decisions
*
* ------------------
@@ -2646,7 +2649,7 @@ public class SolrInformationServer implements InformationServer
* about
*
* - the "INSERT" or "UPDATE" nature of the indexing operation that is going to be executed
* - the values of fields of a given document indeded in Solr
* - the values of fields of a given document indexed in Solr
*
* In order to indicate to the ContentTracker which documents will require the content update, we added two
* additional fields in the schema:
@@ -2656,8 +2659,8 @@ public class SolrInformationServer implements InformationServer
* LATEST_APPLIED_CONTENT_VERSION_ID: as the name suggests, this is the latest DOCID applied to this document (again, not the lucene docid)
* </li>
* <li>
* LAST_INCOMING_CONTENT_VERSION_ID: a field that will contains "-10" if the content is outdated, otherwise it will have the same value
* of LATEST_APPLIED_CONTENT_VERSION_ID.
* LAST_INCOMING_CONTENT_VERSION_ID: this call will set the field to "-10" as the content is outdated. The content tracker will then set it
* to have same value as LATEST_APPLIED_CONTENT_VERSION_ID.
* </li>
* </ul>
*
@@ -2667,11 +2670,8 @@ public class SolrInformationServer implements InformationServer
{
ofNullable(value)
.map(ContentPropertyValue::getId)
.ifPresentOrElse(
id -> {
document.setField(LATEST_APPLIED_CONTENT_VERSION_ID, id);
document.setField(LAST_INCOMING_CONTENT_VERSION_ID, Map.of("removeregex", "^(?!"+id+"$).*$"));},
() -> document.setField(LAST_INCOMING_CONTENT_VERSION_ID, CONTENT_OUTDATED_MARKER));
.ifPresent(id -> document.setField(LATEST_APPLIED_CONTENT_VERSION_ID, id));
document.setField(LAST_INCOMING_CONTENT_VERSION_ID, CONTENT_OUTDATED_MARKER);
}
private void addContentProperty(

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Search Services
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -26,6 +26,8 @@
package org.alfresco.solr.security;
import static org.alfresco.solr.security.SecretSharedPropertyCollector.SECURE_COMMS_PROPERTY;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
@@ -49,6 +51,8 @@ import org.apache.solr.security.AuthenticationPlugin;
public class SecretSharedAuthPlugin extends AuthenticationPlugin
{
private static final String SECURE_COMMS_NONE = "none";
/**
* Verify that request header includes "secret" word when using "secret" communication method.
* "alfresco.secureComms.secret" value is expected as Java environment variable.
@@ -69,10 +73,17 @@ public class SecretSharedAuthPlugin extends AuthenticationPlugin
return true;
}
HttpServletResponse httpResponse = (HttpServletResponse) response;
httpResponse.sendError(HttpServletResponse.SC_FORBIDDEN,
"Authentication failure: \"" + SecretSharedPropertyCollector.SECRET_SHARED_METHOD_KEY
+ "\" method has been selected, use the right request header with the secret word");
String errorMessage = "Authentication failure: \"" + SecretSharedPropertyCollector.SECRET_SHARED_METHOD_KEY
+ "\" method has been selected, use the right request header with the secret word";
setErrorResponse(response, errorMessage);
return false;
}
else if (SECURE_COMMS_NONE.equals(SecretSharedPropertyCollector.getCommsMethod())
&& !SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint())
{
String errorMessage = "Authentication failure: \"" + SECURE_COMMS_PROPERTY
+ "=none\" is no longer supported. Please use \"https\" or \"secret\" instead.";
setErrorResponse(response, errorMessage);
return false;
}
@@ -81,6 +92,12 @@ public class SecretSharedAuthPlugin extends AuthenticationPlugin
}
private void setErrorResponse(ServletResponse response, String errorMessage) throws IOException
{
HttpServletResponse httpResponse = (HttpServletResponse) response;
httpResponse.sendError(HttpServletResponse.SC_FORBIDDEN, errorMessage);
}
@Override
public void init(Map<String, Object> parameters)
{
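With this change, requests under `secureComms=none` are rejected unless `alfresco.allowUnauthenticatedSolrEndpoint` is explicitly set to `true`. Based on the property locators added to `SecretSharedPropertyCollector`, the flag appears to be read from a JVM system property, from `shared.properties`, or from each core's `solrcore.properties`. A sketch of the system-property route follows; whether your deployment passes JVM options this way, and the image tag, are assumptions.
```bash
# Sketch: keep an unauthenticated (secureComms=none) endpoint working - not recommended
$ docker run -p 8083:8983 \
    -e ALFRESCO_SECURE_COMMS=none \
    -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive \
    -e JAVA_TOOL_OPTIONS="-Dalfresco.allowUnauthenticatedSolrEndpoint=true" \
    searchservices:develop
```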

View File

@@ -26,13 +26,21 @@
package org.alfresco.solr.security;
import static java.util.function.Predicate.not;
import org.alfresco.httpclient.HttpClientFactory;
import org.alfresco.solr.AlfrescoSolrDataModel;
import org.alfresco.solr.config.ConfigUtil;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;
import java.util.function.Consumer;
/**
* Provides property values for Alfresco Communication using "secret" method:
@@ -45,15 +53,28 @@ import java.util.Set;
public class SecretSharedPropertyCollector
{
public final static String SECRET_SHARED_METHOD_KEY = "secret";
public static final String SECRET_SHARED_METHOD_KEY = "secret";
// Property names for "secret" communication method
static final String SECURE_COMMS_PROPERTY = "alfresco.secureComms";
private final static String SHARED_SECRET = "alfresco.secureComms.secret";
private final static String SHARED_SECRET_HEADER = "alfresco.secureComms.secret.header";
static final String SHARED_SECRET = "alfresco.secureComms.secret";
static final String ALLOW_UNAUTHENTICATED_SOLR_PROPERTY = "alfresco.allowUnauthenticatedSolrEndpoint";
private static final String SHARED_SECRET_HEADER = "alfresco.secureComms.secret.header";
// Save communication method as static value in order to improve performance
static String commsMethod;
// Memoize read properties to improve performance
static final Map<String, String> PROPS_CACHE = new ConcurrentHashMap<>();
// Ordered list of property location functions
private static final ArrayList<BiFunction<String, String, Set<String>>> PROPERTY_LOCATORS = new ArrayList<>();
static
{
// Environment variables
PROPERTY_LOCATORS.add((name, defaultValue) -> toSet(ConfigUtil.locateProperty(name, null)));
// Shared configuration (shared.properties file)
PROPERTY_LOCATORS.add((name, defaultValue) -> toSet(AlfrescoSolrDataModel.getCommonConfig().getProperty(name)));
// Configuration for each deployed SOLR Core
PROPERTY_LOCATORS.add(SecretSharedPropertyHelper::getPropertyFromCores);
}
/**
* Check if communications method is "secret"
@@ -65,50 +86,63 @@ public class SecretSharedPropertyCollector
SecretSharedPropertyCollector.SECRET_SHARED_METHOD_KEY);
}
/**
* Check if unauthenticated Solr access is allowed
* @return true if unauthenticated Solr access is allowed
*/
public static boolean isAllowUnauthenticatedSolrEndpoint()
{
return Boolean.parseBoolean(PROPS_CACHE.computeIfAbsent(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY,
key -> getProperty(key, "false")));
}
/**
* Get communication method from environment variables, shared properties or core properties.
* @return Communication method: none, https, secret
*/
static String getCommsMethod()
{
if (commsMethod == null)
return PROPS_CACHE.computeIfAbsent(SECURE_COMMS_PROPERTY,
key -> getProperty(key, "none", uniqueSecureCommsValidator()));
}
private static String getProperty(String name, String defaultValue)
{
return getProperty(name, defaultValue, null);
}
private static String getProperty(String name, String defaultValue, Consumer<Set<String>> propertySetValidator)
{
// Loop orderly through the property locators until the property is found
Set<String> propertySet = PROPERTY_LOCATORS.stream()
.map(propertyLocator -> propertyLocator.apply(name, defaultValue))
.filter(not(Set::isEmpty))
.findFirst()
.orElse(Set.of());
if (propertySetValidator != null)
{
// Environment variable
commsMethod = ConfigUtil.locateProperty(SECURE_COMMS_PROPERTY, null);
if (commsMethod == null)
{
// Shared configuration (shared.properties file)
commsMethod = AlfrescoSolrDataModel.getCommonConfig().getProperty(SECURE_COMMS_PROPERTY);
if (commsMethod == null)
{
// Get configuration from deployed SOLR Cores
Set<String> secureCommsSet = SecretSharedPropertyHelper.getCommsFromCores();
// In case of multiple cores, *all* of them must have the same secureComms value.
// From that perspective, you may find the second clause in the conditional statement
// below not strictly necessary. The reason is that the check below is in charge to make
// sure a consistent configuration about the secret shared property has been defined in all cores.
if (secureCommsSet.size() > 1 && secureCommsSet.contains(SECRET_SHARED_METHOD_KEY))
{
throw new RuntimeException(
"No valid secure comms values: all the cores must be using \"secret\" communication method but found: "
+ secureCommsSet);
}
return commsMethod =
secureCommsSet.isEmpty()
? null
: secureCommsSet.iterator().next();
}
}
// Run the propertySetValidator to eg. verify value uniqueness among multiple cores
propertySetValidator.accept(propertySet);
}
return commsMethod;
return propertySet.isEmpty() ? null : propertySet.iterator().next();
}
private static Consumer<Set<String>> uniqueSecureCommsValidator()
{
// In case of multiple cores, *all* of them must have the same secureComms value.
// From that perspective, the second clause in the conditional statement below may not look
// strictly necessary; its purpose is to ensure that a consistent secret shared configuration
// has been defined in all cores.
return secureCommsSet -> {
if (secureCommsSet.size() > 1 && secureCommsSet.contains(SECRET_SHARED_METHOD_KEY))
{
throw new RuntimeException(
"No valid secure comms values: all the cores must be using \"secret\" communication method but found: "
+ secureCommsSet);
}
};
}
/**
@@ -126,7 +160,8 @@ public class SecretSharedPropertyCollector
if (secret == null || secret.length() == 0)
{
throw new RuntimeException("Missing value for " + SHARED_SECRET + " configuration property");
throw new RuntimeException("Missing value for " + SHARED_SECRET + " configuration property. Make sure to"
+ " pass this property as a JVM Argument (eg. -D" + SHARED_SECRET + "=my-secret-value).");
}
return secret;
@@ -167,4 +202,16 @@ public class SecretSharedPropertyCollector
return properties;
}
private static Set<String> toSet(String value)
{
Set<String> propertySet = new HashSet<>();
if (value != null)
{
propertySet.add(value);
}
return propertySet;
}
}

View File

@@ -58,11 +58,12 @@ class SecretSharedPropertyHelper
};
/**
* Read different values of "alfresco.secureComms" property from every "solrcore.properties" files.
*
* Read different values of the specified property from every "solrcore.properties" file.
* @param name The name of the property to read
* @param defaultValue The default value for the given property
* @return List of different communication methods declared in SOLR Cores.
*/
static Set<String> getCommsFromCores()
static Set<String> getPropertyFromCores(String name, String defaultValue)
{
try (Stream<Path> walk = Files.walk(Paths.get(SolrResourceLoader.locateSolrHome().toString())))
{
@@ -74,7 +75,7 @@ class SecretSharedPropertyHelper
return solrCorePropertiesFiles.stream()
.map(toProperties)
.map(properties -> properties.getProperty(SECURE_COMMS_PROPERTY, "none"))
.map(properties -> properties.getProperty(name, defaultValue))
.collect(toSet());
}
catch (IOException e)

View File

@@ -222,18 +222,7 @@ public abstract class AbstractTracker implements Tracker
assert(assertTrackerStateRemainsNull());
}
if(this.state == null)
{
this.state = getTrackerState();
LOGGER.debug("[{} / {} / {}] Global Tracker State set to: {}", coreName, trackerId, iterationId, this.state.toString());
this.state.setRunning(true);
}
else
{
continueState();
this.state.setRunning(true);
}
updateTrackerState(iterationId);
infoSrv.registerTrackerThread();
@@ -275,6 +264,21 @@ public abstract class AbstractTracker implements Tracker
}
}
private synchronized void updateTrackerState(String iterationId) {
if(this.state == null)
{
this.state = getTrackerState();
LOGGER.debug("[{} / {} / {}] Global Tracker State set to: {}", coreName, trackerId, iterationId, this.state.toString());
}
else
{
continueState();
}
this.state.setRunning(true);
}
/**
* At the end of the tracking method, the {@link TrackerState} should be turned off.
* However, during a rollback (that could be started by another tracker) the {@link TrackerState} instance

View File

@@ -2,7 +2,7 @@
* #%L
* Alfresco Search Services
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -594,32 +594,24 @@ public class MetadataTracker extends ActivatableTracker
private void reindexNodes() throws IOException, AuthenticationException, JSONException
{
boolean requiresCommit = false;
while (nodesToReindex.peek() != null)
{
Long nodeId = nodesToReindex.poll();
if (nodeId != null)
{
// make sure it is cleaned out so we do not miss deletes
this.infoSrv.deleteByNodeId(nodeId);
Node node = new Node();
final Node node = new Node();
node.setId(nodeId);
node.setStatus(SolrApiNodeStatus.UNKNOWN);
node.setTxnId(Long.MAX_VALUE);
this.infoSrv.indexNode(node, true);
LOGGER.info("REINDEX ACTION - Node {} has been reindexed", node.getId());
requiresCommit = true;
for (final Node n : filterNodes(List.of(node)))
{
this.infoSrv.indexNode(n, true);
LOGGER.info("REINDEX ACTION - Node {} has been reindexed", n.getId());
}
}
checkShutdown();
}
if(requiresCommit)
{
checkShutdown();
//this.infoSrv.commit();
}
}
private void reindexNodesByQuery() throws IOException, AuthenticationException, JSONException
@@ -644,6 +636,44 @@ public class MetadataTracker extends ActivatableTracker
}
}
private List<Node> filterNodes(List<Node> nodes)
{
List<Node> filteredList = new ArrayList<>(nodes.size());
for(Node node : nodes)
{
if(docRouter.routeNode(shardCount, shardInstance, node))
{
filteredList.add(node);
}
else if (cascadeTrackerEnabled)
{
if(node.getStatus() == SolrApiNodeStatus.UPDATED)
{
Node doCascade = new Node();
doCascade.setAclId(node.getAclId());
doCascade.setId(node.getId());
doCascade.setNodeRef(node.getNodeRef());
doCascade.setStatus(SolrApiNodeStatus.NON_SHARD_UPDATED);
doCascade.setTenant(node.getTenant());
doCascade.setTxnId(node.getTxnId());
filteredList.add(doCascade);
}
else // DELETED & UNKNOWN
{
// Make sure anything no longer relevant to this shard is deleted.
Node doDelete = new Node();
doDelete.setAclId(node.getAclId());
doDelete.setId(node.getId());
doDelete.setNodeRef(node.getNodeRef());
doDelete.setStatus(SolrApiNodeStatus.NON_SHARD_DELETED);
doDelete.setTenant(node.getTenant());
doDelete.setTxnId(node.getTxnId());
filteredList.add(doDelete);
}
}
}
return filteredList;
}
private void purgeTransactions() throws IOException, JSONException
{
@@ -808,7 +838,7 @@ public class MetadataTracker extends ActivatableTracker
{
LOGGER.debug(
"{}-[CORE {}] [DB_ID_RANGE] Last commit time is greater that max commit time in in range [{}-{}]. "
+ "Indexing only latest transaction.",
+ "Indexing only latest transaction if necessary.",
Thread.currentThread().getId(), coreName, dbIdRangeRouter.getStartRange(),
dbIdRangeRouter.getEndRange());
shardOutOfRange = true;
@@ -832,10 +862,19 @@ public class MetadataTracker extends ActivatableTracker
Transaction latestTransaction = new Transaction();
latestTransaction.setCommitTimeMs(transactions.getMaxTxnCommitTime());
latestTransaction.setId(transactions.getMaxTxnId());
transactions = new Transactions(
Collections.singletonList(latestTransaction),
transactions.getMaxTxnCommitTime(),
if (!isTransactionIndexed(latestTransaction))
{
transactions = new Transactions(Collections.singletonList(latestTransaction), transactions.getMaxTxnCommitTime(),
transactions.getMaxTxnId());
LOGGER.debug("{}:{}-[CORE {}] [DB_ID_RANGE] Latest transaction to be indexed {}",
Thread.currentThread().getId(), coreName, latestTransaction);
}
else
{
// All up to date, don't return transactions
return new Transactions(Collections.emptyList(), 0L, 0L);
}
}
return transactions;
@@ -1155,45 +1194,6 @@ public class MetadataTracker extends ActivatableTracker
{
setRollback(true, failCausedBy);
}
private List<Node> filterNodes(List<Node> nodes)
{
List<Node> filteredList = new ArrayList<>(nodes.size());
for(Node node : nodes)
{
if(docRouter.routeNode(shardCount, shardInstance, node))
{
filteredList.add(node);
}
else if (cascadeTrackerEnabled)
{
if(node.getStatus() == SolrApiNodeStatus.UPDATED)
{
Node doCascade = new Node();
doCascade.setAclId(node.getAclId());
doCascade.setId(node.getId());
doCascade.setNodeRef(node.getNodeRef());
doCascade.setStatus(SolrApiNodeStatus.NON_SHARD_UPDATED);
doCascade.setTenant(node.getTenant());
doCascade.setTxnId(node.getTxnId());
filteredList.add(doCascade);
}
else // DELETED & UNKNOWN
{
// Make sure anything no longer relevant to this shard is deleted.
Node doDelete = new Node();
doDelete.setAclId(node.getAclId());
doDelete.setId(node.getId());
doDelete.setNodeRef(node.getNodeRef());
doDelete.setStatus(SolrApiNodeStatus.NON_SHARD_DELETED);
doDelete.setTenant(node.getTenant());
doDelete.setTxnId(node.getTxnId());
filteredList.add(doDelete);
}
}
}
return filteredList;
}
}
@Override

View File

@@ -27,6 +27,7 @@
package org.alfresco.solr;
import static java.util.Optional.ofNullable;
import static org.alfresco.service.cmr.dictionary.DataTypeDefinition.ANY;
import static org.alfresco.service.cmr.dictionary.DataTypeDefinition.ASSOC_REF;
import static org.alfresco.service.cmr.dictionary.DataTypeDefinition.BOOLEAN;
@@ -45,6 +46,10 @@ import static org.alfresco.service.cmr.dictionary.DataTypeDefinition.PATH;
import static org.alfresco.service.cmr.dictionary.DataTypeDefinition.PERIOD;
import static org.alfresco.service.cmr.dictionary.DataTypeDefinition.QNAME;
import static org.alfresco.service.cmr.dictionary.DataTypeDefinition.TEXT;
import static org.alfresco.solr.SolrInformationServer.REQUEST_HANDLER_GET;
import static org.alfresco.solr.SolrInformationServer.REQUEST_HANDLER_NATIVE;
import static org.alfresco.solr.SolrInformationServer.RESPONSE_DEFAULT_ID;
import static org.alfresco.solr.SolrInformationServer.RESPONSE_DEFAULT_IDS;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_DAY_FIELD_SUFFIX;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_HOUR_FIELD_SUFFIX;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_MINUTE_FIELD_SUFFIX;
@@ -57,31 +62,51 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.alfresco.httpclient.AuthenticationException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.search.adaptor.QueryConstants;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.solr.client.Node;
import org.alfresco.solr.client.NodeMetaData;
import org.alfresco.solr.client.NodeMetaDataParameters;
import org.alfresco.solr.client.SOLRAPIClient;
import org.alfresco.solr.client.StringPropertyValue;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.BasicResultContext;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.DocList;
import org.apache.solr.search.DocSlice;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -120,10 +145,18 @@ public class SolrInformationServerTest
private SolrQueryRequest request;
@Mock
private UpdateRequestProcessorChain updateRequestProcessorChain;
@Mock
private UpdateRequestProcessor updateRequestProcessor;
@Before
public void setUp()
{
when(core.getResourceLoader()).thenReturn(resourceLoader);
when(core.getRequestHandler(REQUEST_HANDLER_GET)).thenReturn(handler);
when(core.getRequestHandler(REQUEST_HANDLER_NATIVE)).thenReturn(handler);
when(resourceLoader.getCoreProperties()).thenReturn(new Properties());
infoServer = new SolrInformationServer(adminHandler, core, client)
{
@@ -135,6 +168,10 @@ public class SolrInformationServerTest
};
request = infoServer.newSolrQueryRequest();
// mock updateProcessingChain -> createProcessor -> processAdd method calls
when(core.getUpdateProcessingChain(null)).thenReturn(updateRequestProcessorChain);
when(updateRequestProcessorChain.createProcessor(any(),any())).thenReturn(updateRequestProcessor);
}
@Test
@@ -145,15 +182,14 @@ public class SolrInformationServerTest
SolrDocument state = new SolrDocument();
SimpleOrderedMap<SolrDocument> responseContent = new SimpleOrderedMap<>();
responseContent.add(SolrInformationServer.RESPONSE_DEFAULT_ID, state);
responseContent.add(RESPONSE_DEFAULT_ID, state);
when(response.getValues()).thenReturn(responseContent);
when(core.getRequestHandler(SolrInformationServer.REQUEST_HANDLER_GET)).thenReturn(handler);
SolrDocument document = infoServer.getState(core, request, id);
assertEquals(id, request.getParams().get(CommonParams.ID));
verify(core).getRequestHandler(SolrInformationServer.REQUEST_HANDLER_GET);
verify(core).getRequestHandler(REQUEST_HANDLER_GET);
verify(response).getValues();
assertSame(state, document);
@@ -313,15 +349,14 @@ public class SolrInformationServerTest
String id = String.valueOf(System.currentTimeMillis());
SimpleOrderedMap<Object> responseContent = new SimpleOrderedMap<>();
responseContent.add(SolrInformationServer.RESPONSE_DEFAULT_ID, null);
responseContent.add(RESPONSE_DEFAULT_ID, null);
when(response.getValues()).thenReturn(responseContent);
when(core.getRequestHandler(SolrInformationServer.REQUEST_HANDLER_GET)).thenReturn(handler);
SolrDocument document = infoServer.getState(core, request, id);
assertEquals(id, request.getParams().get(CommonParams.ID));
verify(core).getRequestHandler(SolrInformationServer.REQUEST_HANDLER_GET);
verify(core).getRequestHandler(REQUEST_HANDLER_GET);
verify(response).getValues();
assertNull(document);
@@ -417,5 +452,86 @@ public class SolrInformationServerTest
});
}
}
/** Check that the FTS report is derived from the correct parts of the Solr response. */
@Test
public void testAddContentOutdatedAndUpdatedCounts()
{
// Pretend that there are three documents in total.
NamedList<Object> responseContent = new SimpleOrderedMap<>();
DocList docList = mock(DocList.class);
when(docList.matches()).thenReturn(3);
BasicResultContext basicResultContext = mock(BasicResultContext.class);
when(basicResultContext.getDocList()).thenReturn(docList);
responseContent.add(RESPONSE_DEFAULT_IDS, basicResultContext);
// Set the facet to say one document is outdated.
NamedList<Number> facetQueries = new SimpleOrderedMap<>();
facetQueries.add("OUTDATED", 1);
NamedList<NamedList<Number>> facetCounts = new SimpleOrderedMap<>();
facetCounts.add("facet_queries", facetQueries);
responseContent.add("facet_counts", facetCounts);
// Set up the request handler to return the fake response.
doAnswer(invocation -> {
SolrQueryResponse solrQueryResponse = invocation.getArgument(1);
solrQueryResponse.setAllValues(responseContent);
return null;
}).when(handler).handleRequest(any(SolrQueryRequest.class), any(SolrQueryResponse.class));
// Call the method under test.
NamedList<Object> report = new NamedList<>();
infoServer.addContentOutdatedAndUpdatedCounts(report);
// Check the report.
assertEquals("Expected two content nodes to be in sync.", report.get("Node count whose content is in sync"), 2L);
assertEquals("Expected one content node to need an update.", report.get("Node count whose content needs to be updated"), 1L);
}
/** Test if a node with "cm:isIndexed"=false is ignored */
@Test
public void testUnindexedNode() throws IOException, AuthenticationException
{
// mocked node metadata
NodeMetaData nodeMetaData = new NodeMetaData();
nodeMetaData.setProperties(Map.of(ContentModel.PROP_IS_INDEXED, new StringPropertyValue("false")));
nodeMetaData.setAspects(Set.of(ContentModel.ASPECT_INDEX_CONTROL));
nodeMetaData.setType(ContentModel.TYPE_CONTENT);
nodeMetaData.setNodeRef(new NodeRef("workspace://SpacesStore/f7c71f35-b592-40e2-a15f-fccbadd6b4d3"));
when(client.getNodesMetaData(any(NodeMetaDataParameters.class))).thenReturn(Arrays.asList(nodeMetaData));
// when calling ".indexNode" method, ".deleteErrorNode" private method will need some resultContext, preferably with docListSize=0
// to ensure that, we need a "resultContext" with "matches"=0, which is created by having a "SolrQueryResponse" correctly setup,
// when the requestHandler handles the request
doAnswer(invocationOnMock -> {
SolrQueryRequest request = invocationOnMock.getArgument(0);
SolrQueryResponse response = invocationOnMock.getArgument(1);
NamedList<Object> namedList = new NamedList<>();
BasicResultContext rc = new BasicResultContext(new DocSlice(1, 1, new int[] {}, new float[] {}, 0, 0), null, null, null, request);
namedList.add("response", rc);
response.setAllValues(namedList);
return null;
}).when(handler).handleRequest(any(), any());
// checks if "UpdateRequestProcessor#processAdd" arguments contains DOC_TYPE as UnindexedNode
doAnswer(invocationOnMock -> {
AddUpdateCommand cmd = invocationOnMock.getArgument(0);
assertEquals("UnindexedNode", cmd.solrDoc.get("DOC_TYPE").getValue());
return null;
}).when(updateRequestProcessor).processAdd(any());
// Node to be fed to Solr
Node node = mock(Node.class);
when(node.getStatus()).thenReturn(Node.SolrApiNodeStatus.UPDATED);
when(node.getId()).thenReturn(865l);
// method to be tested - when we index a node with "cm:isIndexed"=false, the node should not be indexed
infoServer.indexNode(node, false);
// verifies if the method was called
verify(updateRequestProcessor).processAdd(any());
}
}

View File

@@ -35,8 +35,12 @@ import java.util.Properties;
import java.util.Set;
import static java.util.Collections.emptySet;
import static org.alfresco.solr.security.SecretSharedPropertyCollector.ALLOW_UNAUTHENTICATED_SOLR_PROPERTY;
import static org.alfresco.solr.security.SecretSharedPropertyCollector.PROPS_CACHE;
import static org.alfresco.solr.security.SecretSharedPropertyCollector.SECRET_SHARED_METHOD_KEY;
import static org.alfresco.solr.security.SecretSharedPropertyCollector.SECURE_COMMS_PROPERTY;
import static org.alfresco.solr.security.SecretSharedPropertyCollector.SHARED_SECRET;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
@@ -45,30 +49,133 @@ import static org.mockito.Mockito.mockStatic;
public class SecretSharedPropertyCollectorTest
{
private final static String A_COMMS_METHOD = "aCommsMethod";
private final static String SET_THROUGH_SYSTEM_PROPERTY = "aCommsMethod_SetThroughSystemProperty";
private final static String SET_THROUGH_ALFRESCO_COMMON_CONFIG = "aCommsMethod_SetThroughAlfrescoCommonConfig";
private final static String COMMS_METHOD_FROM_SOLRCORE = "aCommsMethod_FromSolrCore";
private static final String A_COMMS_METHOD = "aCommsMethod";
private static final String SET_THROUGH_SYSTEM_PROPERTY = "aCommsMethod_SetThroughSystemProperty";
private static final String SET_THROUGH_ALFRESCO_COMMON_CONFIG = "aCommsMethod_SetThroughAlfrescoCommonConfig";
private static final String COMMS_METHOD_FROM_SOLRCORE = "aCommsMethod_FromSolrCore";
private static final String SECRET_VALUE = "my-secret";
private static final String SECURE_COMMS_NONE = "none";
private static final String TRUE = "true";
private static final String FALSE = "false";
private static final Set<String> PROPS_TO_CLEAR = Set.of(SHARED_SECRET, SECURE_COMMS_PROPERTY, ALLOW_UNAUTHENTICATED_SOLR_PROPERTY);
@Before
public void setUp()
{
SecretSharedPropertyCollector.commsMethod = null;
assertNull(System.getProperty(SECURE_COMMS_PROPERTY));
assertNull(AlfrescoSolrDataModel.getCommonConfig().getProperty(SECURE_COMMS_PROPERTY));
PROPS_CACHE.clear();
for (String property : PROPS_TO_CLEAR)
{
assertNull(System.getProperty(property));
assertNull(AlfrescoSolrDataModel.getCommonConfig().getProperty(property));
}
}
@After
public void tearDown()
{
System.clearProperty(SECURE_COMMS_PROPERTY);
AlfrescoSolrDataModel.getCommonConfig().remove(SECURE_COMMS_PROPERTY);
for (String property : PROPS_TO_CLEAR)
{
System.clearProperty(property);
AlfrescoSolrDataModel.getCommonConfig().remove(property);
}
}
@Test
public void getSecret_shouldReturnTheSecretValue()
{
System.setProperty(SecretSharedPropertyCollector.SHARED_SECRET, SECRET_VALUE);
assertEquals(SECRET_VALUE, SecretSharedPropertyCollector.getSecret());
}
@Test(expected = RuntimeException.class)
public void getSecretWithMissingSecretValue_shouldThrowException()
{
SecretSharedPropertyCollector.getSecret();
}
@Test
public void allowUnauthenticatedSolrIsNotSet_shouldReturnFalse()
{
try(MockedStatic<SecretSharedPropertyHelper> mock = mockStatic(SecretSharedPropertyHelper.class))
{
mock.when(() -> SecretSharedPropertyHelper.getPropertyFromCores(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY, FALSE))
.thenReturn(emptySet());
assertFalse(SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint());
}
}
@Test
public void allowUnauthenticatedSolrIsTrueThroughSystemProperty_shouldReturnTrue()
{
System.setProperty(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY, TRUE);
assertTrue(SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint());
}
@Test
public void allowUnauthenticatedSolrIsFalseThroughSystemProperty_shouldReturnFalse()
{
System.setProperty(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY, FALSE);
assertFalse(SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint());
}
@Test
public void allowUnauthenticatedSolrIsTrueThroughAlfrescoProperties_shouldReturnTrue()
{
try(MockedStatic<AlfrescoSolrDataModel> mock = mockStatic(AlfrescoSolrDataModel.class))
{
var alfrescoCommonConfig = new Properties();
alfrescoCommonConfig.setProperty(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY, TRUE);
mock.when(AlfrescoSolrDataModel::getCommonConfig).thenReturn(alfrescoCommonConfig);
assertTrue(SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint());
}
}
@Test
public void allowUnauthenticatedSolrIsFalseThroughAlfrescoProperties_shouldReturnFalse()
{
try(MockedStatic<AlfrescoSolrDataModel> mock = mockStatic(AlfrescoSolrDataModel.class))
{
var alfrescoCommonConfig = new Properties();
alfrescoCommonConfig.setProperty(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY, FALSE);
mock.when(AlfrescoSolrDataModel::getCommonConfig).thenReturn(alfrescoCommonConfig);
assertFalse(SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint());
}
}
@Test
public void allowUnauthenticatedSolrIsTrueThroughSolrCores_shouldReturnTrue()
{
try(MockedStatic<SecretSharedPropertyHelper> mock = mockStatic(SecretSharedPropertyHelper.class))
{
mock.when(() -> SecretSharedPropertyHelper.getPropertyFromCores(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY, FALSE))
.thenReturn(Set.of(TRUE));
assertTrue(SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint());
}
}
@Test
public void allowUnauthenticatedSolrIsFalseThroughSolrCores_shouldReturnFalse()
{
try(MockedStatic<SecretSharedPropertyHelper> mock = mockStatic(SecretSharedPropertyHelper.class))
{
mock.when(() -> SecretSharedPropertyHelper.getPropertyFromCores(ALLOW_UNAUTHENTICATED_SOLR_PROPERTY, FALSE))
.thenReturn(Set.of(FALSE));
assertFalse(SecretSharedPropertyCollector.isAllowUnauthenticatedSolrEndpoint());
}
}
@Test
public void commsMethodIsNotNull_shouldReturnThatValue()
{
SecretSharedPropertyCollector.commsMethod = A_COMMS_METHOD;
PROPS_CACHE.put(SECURE_COMMS_PROPERTY, A_COMMS_METHOD);
assertEquals(A_COMMS_METHOD, SecretSharedPropertyCollector.getCommsMethod());
assertFalse(SecretSharedPropertyCollector.isCommsSecretShared());
@@ -77,7 +184,7 @@ public class SecretSharedPropertyCollectorTest
@Test
public void commsMethodIsNotNullAndIsSecret_shouldReturnThatValue()
{
SecretSharedPropertyCollector.commsMethod = SECRET_SHARED_METHOD_KEY;
PROPS_CACHE.put(SECURE_COMMS_PROPERTY, SECRET_SHARED_METHOD_KEY);
assertEquals(SECRET_SHARED_METHOD_KEY, SecretSharedPropertyCollector.getCommsMethod());
assertTrue(SecretSharedPropertyCollector.isCommsSecretShared());
@@ -121,7 +228,8 @@ public class SecretSharedPropertyCollectorTest
{
try(MockedStatic<SecretSharedPropertyHelper> mock = mockStatic(SecretSharedPropertyHelper.class))
{
mock.when(SecretSharedPropertyHelper::getCommsFromCores).thenReturn(Set.of(COMMS_METHOD_FROM_SOLRCORE));
mock.when(() -> SecretSharedPropertyHelper.getPropertyFromCores(SECURE_COMMS_PROPERTY, SECURE_COMMS_NONE))
.thenReturn(Set.of(COMMS_METHOD_FROM_SOLRCORE));
assertEquals(COMMS_METHOD_FROM_SOLRCORE, SecretSharedPropertyCollector.getCommsMethod());
assertFalse(SecretSharedPropertyCollector.isCommsSecretShared());
@@ -140,7 +248,8 @@ public class SecretSharedPropertyCollectorTest
{
try(MockedStatic<SecretSharedPropertyHelper> mock = mockStatic(SecretSharedPropertyHelper.class))
{
mock.when(SecretSharedPropertyHelper::getCommsFromCores).thenReturn(emptySet());
mock.when(() -> SecretSharedPropertyHelper.getPropertyFromCores(SECURE_COMMS_PROPERTY, SECURE_COMMS_NONE))
.thenReturn(emptySet());
assertNull(SecretSharedPropertyCollector.getCommsMethod());
assertFalse(SecretSharedPropertyCollector.isCommsSecretShared());
@@ -156,10 +265,11 @@ public class SecretSharedPropertyCollectorTest
{
try(MockedStatic<SecretSharedPropertyHelper> mock = mockStatic(SecretSharedPropertyHelper.class))
{
mock.when(SecretSharedPropertyHelper::getCommsFromCores)
.thenReturn(Set.of(COMMS_METHOD_FROM_SOLRCORE, SECRET_SHARED_METHOD_KEY));
mock.when(() -> SecretSharedPropertyHelper.getPropertyFromCores(SECURE_COMMS_PROPERTY, SECURE_COMMS_NONE))
.thenReturn(Set.of(COMMS_METHOD_FROM_SOLRCORE, SECRET_SHARED_METHOD_KEY));
SecretSharedPropertyCollector.getCommsMethod();
}
}
}

View File

@@ -0,0 +1,183 @@
/*
* #%L
* Alfresco Search Services
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.solr.tracker;
import static org.alfresco.solr.AlfrescoSolrUtils.getNode;
import static org.alfresco.solr.AlfrescoSolrUtils.getNodeMetaData;
import static org.alfresco.solr.AlfrescoSolrUtils.getTransaction;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.alfresco.repo.search.adaptor.QueryConstants;
import org.alfresco.solr.AbstractAlfrescoDistributedIT;
import org.alfresco.solr.AlfrescoCoreAdminHandler;
import org.alfresco.solr.AlfrescoSolrUtils.TestActChanges;
import org.alfresco.solr.client.Acl;
import org.alfresco.solr.client.AclChangeSet;
import org.alfresco.solr.client.Node;
import org.alfresco.solr.client.NodeMetaData;
import org.alfresco.solr.client.Transaction;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@SolrTestCaseJ4.SuppressSSL
public class DistributedAlfrescoSolrMetadataTrackerReindexingIT extends AbstractAlfrescoDistributedIT
{
private static final int NUMBER_OF_SHARDS = 2;
private static final Duration TEST_DURATION = Duration.ofMinutes(1);
@BeforeClass
public static void initData() throws Throwable
{
initSolrServers(NUMBER_OF_SHARDS, getSimpleClassName(), null);
}
@AfterClass
public static void destroyData()
{
dismissSolrServers();
}
@Test
public void shouldReindexNodeOnlyOnShardWhereNodeBelongsTo() throws Exception
{
putHandleDefaults();
long nodeId = indexNewNode();
assertThatNodeIsIndexedOnExactlyOneShard(nodeId);
final Instant end = Instant.now().plus(TEST_DURATION);
while (Instant.now().isBefore(end))
{
triggerNodeIdReindexingOnAllShards(nodeId);
assertThatNodeIsIndexedOnExactlyOneShard(nodeId);
Thread.sleep(7_000);
}
}
private void triggerNodeIdReindexingOnAllShards(long nodeId) throws Exception
{
final Map<String, SolrCore> cores = getCores(solrShards)
.stream()
.collect(Collectors.toMap(SolrCore::getName, Function.identity()));
final List<AlfrescoCoreAdminHandler> adminHandlers = getAdminHandlers(solrShards);
assertEquals(NUMBER_OF_SHARDS, adminHandlers.size());
for (int i = 0; i < NUMBER_OF_SHARDS; i++)
{
final SolrCore core = cores.get("shard" + i);
assertNotNull(core);
final AlfrescoCoreAdminHandler admin = adminHandlers.get(i);
assertNotNull(admin);
final SolrQueryRequest reindexRequest = new LocalSolrQueryRequest(core,
params(
CoreAdminParams.ACTION, "REINDEX",
CoreAdminParams.CORE, core.getName(),
"nodeid", Long.toString(nodeId)
)
);
final SolrQueryResponse reindexResponse = new SolrQueryResponse();
admin.handleRequestBody(reindexRequest, reindexResponse);
assertNull(reindexResponse.getException());
}
}
private void assertThatNodeIsIndexedOnExactlyOneShard(long nodeId)
{
final List<SolrClient> allClients = getShardedClients();
assertEquals(NUMBER_OF_SHARDS, allClients.size());
final ModifiableSolrParams queryParams = params("qt", "/afts", "q", "DBID:" + nodeId);
final long sumForAllShards = allClients.stream().map(c -> {
try
{
return c.query(queryParams);
} catch (SolrServerException | IOException e)
{
throw new RuntimeException(e);
}
}).map(QueryResponse::getResults).mapToLong(SolrDocumentList::getNumFound).sum();
assertEquals(1, sumForAllShards);
}
private long indexNewNode() throws Exception
{
final Transaction tx = getTransaction(0, 1);
final Acl acl = getAcl();
final Node node = getNode(tx, acl, Node.SolrApiNodeStatus.UPDATED);
final NodeMetaData nodeMetaData = getNodeMetaData(node, tx, acl, "piotrek", null, false);
indexTransaction(tx, List.of(node), List.of(nodeMetaData));
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1, 100000);
return node.getId();
}
private Acl getAcl() throws Exception
{
TestActChanges testActChanges = new TestActChanges().createBasicTestData();
AclChangeSet aclChangeSet = testActChanges.getChangeSet();
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
BooleanQuery waitForQuery = builder.build();
waitForDocCountAllCores(waitForQuery, 1, 80000);
return testActChanges.getFirstAcl();
}
}

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-search-parent</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
</parent>
<properties>
@@ -20,8 +20,14 @@
<groupId>org.alfresco</groupId>
<artifactId>alfresco-data-model</artifactId>
<version>${dependency.alfresco-data-model.version}</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
@@ -46,6 +52,11 @@
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>junit</groupId>
@@ -61,8 +72,8 @@
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.30</version>
<artifactId>slf4j-reload4j</artifactId>
<version>${slf4j.version}</version>
</dependency>
</dependencies>

View File

@@ -60,8 +60,7 @@ import org.alfresco.util.ISO8601DateFormat;
import org.alfresco.util.Pair;
import org.apache.commons.codec.EncoderException;
import org.apache.commons.codec.net.URLCodec;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.util.DateUtil;
import org.apache.http.HttpStatus;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
@@ -75,6 +74,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
@@ -112,6 +112,8 @@ public class SOLRAPIClient
private static final String CHECKSUM_HEADER = "XAlfresco-modelChecksum";
private static final SimpleDateFormat httpHeaderDateFormat = new SimpleDateFormat("EEE', 'dd' 'MMM' 'yyyy' 'HH:mm:ss' 'Z", Locale.US);
private AlfrescoHttpClient repositoryHttpClient;
private SOLRDeserializer deserializer;
private DictionaryService dictionaryService;
@@ -492,7 +494,7 @@ public class SOLRAPIClient
{
String message = "Received a malformed JSON payload. Request was \"" +
req.getFullUri() +
"Data: "
"\" Data: "
+ ofNullable(reader)
.map(LookAheadBufferedReader::lookAheadAndGetBufferedContent)
.orElse("Not available");
@@ -1030,7 +1032,7 @@ public class SOLRAPIClient
Map<String, String> headers = new HashMap<>();
if(modifiedSince != null)
{
headers.put("If-Modified-Since", String.valueOf(DateUtil.formatDate(new Date(modifiedSince))));
headers.put("If-Modified-Since", httpHeaderDateFormat.format(new Date(modifiedSince)));
}
if (compression)
{
@@ -1156,7 +1158,7 @@ public class SOLRAPIClient
{
String message = "Received a malformed JSON payload. Request was \"" +
get.getFullUri() +
"Data: "
"\" Data: "
+ ofNullable(reader)
.map(LookAheadBufferedReader::lookAheadAndGetBufferedContent)
.orElse("Not available");
@@ -1214,7 +1216,7 @@ public class SOLRAPIClient
{
String message = "Received a malformed JSON payload. Request was \"" +
get.getFullUri() +
"Data: "
"\" Data: "
+ ofNullable(reader)
.map(LookAheadBufferedReader::lookAheadAndGetBufferedContent)
.orElse("Not available");
@@ -1609,7 +1611,7 @@ public class SOLRAPIClient
{
String message = "Received a malformed JSON payload. Request was \"" +
req.getFullUri() +
"Data: "
"\" Data: "
+ ofNullable(reader)
.map(LookAheadBufferedReader::lookAheadAndGetBufferedContent)
.orElse("Not available");

View File

@@ -38,7 +38,6 @@ import org.alfresco.httpclient.HttpClientFactory;
import org.alfresco.httpclient.HttpClientFactory.SecureCommsType;
import org.alfresco.repo.dictionary.NamespaceDAO;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.apache.commons.httpclient.params.DefaultHttpParams;
/**
* This factory encapsulates the creation of a SOLRAPIClient and the management of that resource.
@@ -263,13 +262,6 @@ public class SOLRAPIClientFactory
setMaxTotalConnections(maxTotalConnections);
setMaxHostConnections(maxHostConnections);
setSocketTimeout(socketTimeout);
init();
}
@Override
public void init()
{
DefaultHttpParams.setHttpParamsFactory(new NonBlockingHttpParamsFactory());
}
}

View File

@@ -32,7 +32,6 @@ import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -46,7 +45,7 @@ import org.alfresco.httpclient.Response;
import org.alfresco.repo.dictionary.NamespaceDAO;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.service.namespace.QName;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.http.HttpStatus;
import org.json.JSONException;
// TODO error handling, including dealing with a repository that is not responsive (ConnectException in sendRemoteRequest)

View File

@@ -26,16 +26,13 @@
package org.alfresco.solr.client;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.AlgorithmParameters;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -48,11 +45,8 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.alfresco.encryption.DefaultEncryptionUtils;
import org.alfresco.encryption.KeyProvider;
import org.alfresco.encryption.KeyResourceLoader;
import org.alfresco.encryption.KeyStoreParameters;
import org.alfresco.encryption.MACUtils.MACInput;
import org.alfresco.encryption.ssl.SSLEncryptionParameters;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.httpclient.AlfrescoHttpClient;
@@ -76,13 +70,9 @@ import org.alfresco.repo.tenant.SingleTServiceImpl;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.DynamicallySizedThreadPoolExecutor;
import org.alfresco.util.Pair;
import org.alfresco.util.TraceableThreadFactory;
import org.alfresco.util.cache.DefaultAsynchronouslyRefreshedCacheRegistry;
import org.apache.chemistry.opencmis.commons.enums.CmisVersion;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONException;
@@ -103,10 +93,6 @@ public class SOLRAPIClientTest extends TestCase
private SOLRAPIClient client;
private SOLRAPIClient invalidKeyClient;
private SOLRAPIClient tamperWithClient;
private DictionaryDAOImpl dictionaryDAO;
private CMISStrictDictionaryService cmisDictionaryService;
@@ -567,118 +553,4 @@ public class SOLRAPIClientTest extends TestCase
assertTrue(diffs.size() > 0);
}
// public void testMAC() throws IOException, JSONException
// {
// // dodyClient has a secret key that is not the same as the repository's. This
// // should fail with a 401
// try
// {
// Transactions transactions = invalidKeyClient.getTransactions(1298288417234l, null, null, null, 5);
// }
// catch (AuthenticationException e)
// {
// assertEquals("Should have caught unathorised request", e.getMethod().getStatusCode(), HttpStatus.SC_UNAUTHORIZED);
// }
//
// try
// {
// tamperWithEncryptionService.setOverrideTimestamp(true);
// Transactions transactions = tamperWithClient.getTransactions(1298288417234l, null, null, null, 5);
// }
// catch (AuthenticationException e)
// {
// assertEquals("Should have caught unathorised request", e.getMethod().getStatusCode(), HttpStatus.SC_UNAUTHORIZED);
// }
// finally
// {
// tamperWithEncryptionService.setOverrideTimestamp(false);
// }
//
// try
// {
// tamperWithEncryptionService.setOverrideMAC(true);
// Transactions transactions = tamperWithClient.getTransactions(1298288417234l, null, null, null, 5);
// }
// catch (AuthenticationException e)
// {
// assertEquals("Should have caught unathorised request", e.getMethod().getStatusCode(), HttpStatus.SC_UNAUTHORIZED);
// }
// finally
// {
// tamperWithEncryptionService.setOverrideMAC(false);
// }
// }
private void outputTextContent(SOLRAPIClient.GetTextContentResponse response) throws IOException
{
InputStream in = response.getContent();
if (in != null)
{
logger.debug("Text content:");
BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
String line = null;
while ((line = reader.readLine()) != null)
{
logger.debug(line);
}
}
}
/**
* Overrides request encryption to create dodgy MAC and timestamp on requests
*/
private static class TestEncryptionUtils extends DefaultEncryptionUtils
{
private boolean overrideMAC = false;
private boolean overrideTimestamp = false;
public void setOverrideMAC(boolean overrideMAC)
{
this.overrideMAC = overrideMAC;
}
public void setOverrideTimestamp(boolean overrideTimestamp)
{
this.overrideTimestamp = overrideTimestamp;
}
@Override
public void setRequestAuthentication(HttpMethod method, byte[] message) throws IOException
{
if (method instanceof PostMethod)
{
// encrypt body
Pair<byte[], AlgorithmParameters> encrypted = encryptor.encrypt(KeyProvider.ALIAS_SOLR, null, message);
setRequestAlgorithmParameters(method, encrypted.getSecond());
((PostMethod) method).setRequestEntity(new ByteArrayRequestEntity(encrypted.getFirst(), "application/octet-stream"));
}
long requestTimestamp = System.currentTimeMillis();
// add MAC header
byte[] mac = macUtils.generateMAC(KeyProvider.ALIAS_SOLR, new MACInput(message, requestTimestamp, getLocalIPAddress()));
if (logger.isDebugEnabled())
{
logger.debug("Setting MAC " + mac + " on HTTP request " + method.getPath());
logger.debug("Setting timestamp " + requestTimestamp + " on HTTP request " + method.getPath());
}
if (overrideMAC)
{
mac[0] += (byte) 1;
}
setRequestMac(method, mac);
if (overrideTimestamp)
{
requestTimestamp += 60000;
}
// prevent replays
setRequestTimestamp(method, requestTimestamp);
}
}
}
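The override above flips the first MAC byte (or shifts the timestamp) so the repository rejects the request with HTTP 401, which is what the disabled testMAC above exercised. Below is a rough, self-contained sketch of that shared-secret check using plain javax.crypto rather than Alfresco's MACUtils/Encryptor API; the HMAC algorithm, the exact MAC input layout and the MacCheckSketch class are assumptions made purely for illustration.

import java.nio.charset.StandardCharsets;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

/**
 * Minimal sketch (not Alfresco's MACUtils API): the repository is assumed to
 * recompute the MAC over (message, timestamp, client IP) with the shared key
 * and to reject requests whose MAC or timestamp has been tampered with.
 */
public class MacCheckSketch
{
    static byte[] mac(byte[] sharedKey, byte[] message, long timestamp, String clientIp) throws Exception
    {
        Mac hmac = Mac.getInstance("HmacSHA256");
        hmac.init(new SecretKeySpec(sharedKey, "HmacSHA256"));
        hmac.update(message);
        hmac.update(Long.toString(timestamp).getBytes(StandardCharsets.UTF_8));
        hmac.update(clientIp.getBytes(StandardCharsets.UTF_8));
        return hmac.doFinal();
    }

    public static void main(String[] args) throws Exception
    {
        byte[] key = "shared-secret".getBytes(StandardCharsets.UTF_8);
        byte[] body = "{\"txnIds\":[1]}".getBytes(StandardCharsets.UTF_8);
        long now = System.currentTimeMillis();

        byte[] sent = mac(key, body, now, "127.0.0.1");
        byte[] recomputed = mac(key, body, now, "127.0.0.1");

        // Flip the first byte, as TestEncryptionUtils does: the MACs no longer match.
        sent[0] += (byte) 1;
        System.out.println("tampered request accepted: " + java.util.Arrays.equals(sent, recomputed)); // false
    }
}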

View File

@@ -12,7 +12,7 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-search-parent</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
<relativePath>../pom.xml</relativePath>
</parent>
<dependencies>
@@ -111,7 +111,9 @@
<classifier>libs</classifier>
<outputDirectory>${project.build.directory}/solr-libs</outputDirectory>
<excludes>**/jackson-dataformat-smile-*.jar,**/asm-3.3.1.jar,**/jackson-core-asl-*.jar,**/jackson-mapper-asl-*.jar,**/dom4j-1.6.1.jar,
**/annotations-1.0.0.jar,**/spring-context-support-*.jar,**/spring-web-*.jar,**/woodstox-core-asl-4.4.1.jar</excludes>
**/annotations-1.0.0.jar,**/spring-context-support-*.jar,**/spring-web-*.jar,**/woodstox-core-asl-4.4.1.jar, **/isoparser-1.1.18.jar,
**/metadata-extractor-2.9.1.jar,**/org.restlet.ext.servlet-2.3.0.jar,**/reload4j-*.jar
</excludes>
</artifactItem>
</artifactItems>
</configuration>
@@ -130,6 +132,12 @@
<version>${project.version}</version>
<outputDirectory>${project.build.directory}/solr-libs/libs</outputDirectory>
</artifactItem>
<artifactItem>
<groupId>org.restlet.jee</groupId>
<artifactId>org.restlet.ext.servlet</artifactId>
<version>${dependency.restlet.version}</version>
<outputDirectory>${project.build.directory}/solr-libs/libs</outputDirectory>
</artifactItem>
</artifactItems>
<outputDirectory>${project.build.directory}/wars</outputDirectory>
<overWriteReleases>false</overWriteReleases>
@@ -160,6 +168,16 @@
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/woodstox-core-asl-4.4.1.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/calcite-core-1.11.0.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/calcite-linq4j-1.11.0.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/hadoop-annotations-2.7.4.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/hadoop-auth-2.7.4.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/hadoop-common-2.7.4.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/hadoop-hdfs-2.7.4.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/avatica-core-1.9.0.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/org.restlet-2.3.0.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/org.restlet.ext.servlet-2.3.0.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/WEB-INF/lib/httpclient-4.4.1.jar" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/libs/jquery-2.1.3.min.js" />
<delete file="${project.build.directory}/solr-${solr.version}/server/solr-webapp/webapp/js/lib/jquery-1.7.2.min.js" />
</target>
</configuration>
@@ -280,6 +298,22 @@
</replacements>
</configuration>
</execution>
<execution>
<id>jquery-upgrade</id>
<phase>prepare-package</phase>
<goals>
<goal>replace</goal>
</goals>
<configuration>
<file>${solr.directory}/server/solr-webapp/webapp/index.html</file>
<replacements>
<replacement>
<token>libs/jquery-2.1.3.min.js</token>
<value>libs/jquery-3.5.1.min.js</value>
</replacement>
</replacements>
</configuration>
</execution>
</executions>
</plugin>
<plugin>

View File

@@ -12,6 +12,16 @@
<outputDirectory>./solr/server/solr-webapp/webapp/img</outputDirectory>
<filtered>false</filtered>
</fileSet>
<fileSet>
<directory>${project.build.outputDirectory}/solr/server/solr-webapp/webapp/js/lib</directory>
<outputDirectory>./solr/server/solr-webapp/webapp/js/lib</outputDirectory>
<filtered>false</filtered>
</fileSet>
<fileSet>
<directory>${project.build.outputDirectory}/solr/server/solr-webapp/webapp/libs</directory>
<outputDirectory>./solr/server/solr-webapp/webapp/libs</outputDirectory>
<filtered>false</filtered>
</fileSet>
<fileSet>
<directory>${project.build.outputDirectory}</directory>
<outputDirectory>.</outputDirectory>

View File

@@ -1,7 +1,7 @@
# Alfresco Search Services ${project.version} Docker Image
FROM alfresco/alfresco-base-java:11.0.11-centos-8@sha256:2faecdacb239cf0a4721bc198538f365033d7e6c9f735b8cec96d7c995ed7345
LABEL creator="Gethin James" maintainer="Alfresco Search Services Team"
FROM alfresco/alfresco-base-java:11.0.13-centos-7@sha256:c1e399d1bbb5d08e0905f1a9ef915ee7c5ea0c0ede11cc9bd7ca98532a9b27fa
LABEL creator="Alfresco" maintainer="Alfresco"
ENV DIST_DIR /opt/alfresco-search-services
ENV SOLR_ZIP ${project.build.finalName}.zip
@@ -27,7 +27,6 @@ RUN set -x \
-u "${USERID}" \
-o \
"${USERNAME}" \
&& yum update -y \
&& yum install -y unzip \
&& yum install -y lsof ca-certificates \
&& yum install -y wget \

View File

@@ -3,6 +3,10 @@ set -e
# By default this deploys the "Master" setup configuration ("REPLICATION_TYPE=master").
# The slave replica service can be enabled by setting the "REPLICATION_TYPE=slave" environment variable.
log_warn() {
echo -e " ====WARN==== \n$*\nWARN CODE was $LOG_WARN" >&2
}
RERANK_TEMPLATE_PATH=$PWD/solrhome/templates/rerank/conf
NORERANK_TEMPLATE_PATH=$PWD/solrhome/templates/noRerank/conf
SOLR_RERANK_CONFIG_FILE=$RERANK_TEMPLATE_PATH/solrconfig.xml
@@ -86,17 +90,26 @@ if [[ ! -z "$SOLR_JAVA_MEM" ]]; then
fi
# By default the Docker image uses TLS mutual authentication (SSL) for communication with the repository.
# Plain HTTP can be enabled by setting ALFRESCO_SECURE_COMMS to 'none'.
if [[ "none" == "$ALFRESCO_SECURE_COMMS" ]]; then
sed -i 's/alfresco.secureComms=https/alfresco.secureComms=none/' $SOLR_RERANK_CORE_FILE $SOLR_NORERANK_CORE_FILE
# Also apply the setting to existing SOLR core property files, if present
if [[ -f ${PWD}/solrhome/alfresco/conf/solrcore.properties ]]; then
sed -i 's/alfresco.secureComms=https/alfresco.secureComms=none/' ${PWD}/solrhome/alfresco/conf/solrcore.properties
fi
if [[ -f ${PWD}/solrhome/archive/conf/solrcore.properties ]]; then
sed -i 's/alfresco.secureComms=https/alfresco.secureComms=none/' ${PWD}/solrhome/archive/conf/solrcore.properties
fi
fi
# Plain HTTP with a secret word in the request header can be enabled by setting ALFRESCO_SECURE_COMMS to 'secret';
# the secret word must then be supplied as a JVM argument, e.g. JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my-secret-value"
case "$ALFRESCO_SECURE_COMMS" in
secret)
sed -i "s/alfresco.secureComms=https/alfresco.secureComms=secret\n/" $SOLR_RERANK_CORE_FILE $SOLR_NORERANK_CORE_FILE
if [[ -f ${PWD}/solrhome/alfresco/conf/solrcore.properties ]]; then
sed -i "s/alfresco.secureComms=https/alfresco.secureComms=secret\n/" ${PWD}/solrhome/alfresco/conf/solrcore.properties
fi
if [[ -f ${PWD}/solrhome/archive/conf/solrcore.properties ]]; then
sed -i "s/alfresco.secureComms=https/alfresco.secureComms=secret\n/" ${PWD}/solrhome/archive/conf/solrcore.properties
fi
;;
https|'')
;;
*)
LOG_WARN=1
;;
esac
[ -z "$LOG_WARN" ] || log_warn "Something was wrong with the authentication config; defaulting to https mTLS auth.\nIf mTLS is not properly configured, the Search service might not work"
if [[ true == "$ENABLE_SPELLCHECK" ]]; then
sed -i 's/#alfresco.suggestable.property/alfresco.suggestable.property/' ${PWD}/solrhome/conf/shared.properties
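The entrypoint reads all of these switches from the environment, so a container launch might look like the sketch below. The variable names come from the script above; the image name/tag and the published port are assumptions, so substitute whatever you build or pull locally.

# Plain HTTP, no authentication (development only):
docker run -d -p 8983:8983 \
  -e ALFRESCO_SECURE_COMMS=none \
  alfresco/search-services:2.0.3

# Plain HTTP with a shared secret in the request header:
docker run -d -p 8983:8983 \
  -e ALFRESCO_SECURE_COMMS=secret \
  -e JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my-secret-value" \
  alfresco/search-services:2.0.3

# Slave replica with spellcheck suggestions enabled:
docker run -d -p 8983:8983 \
  -e REPLICATION_TYPE=slave \
  -e ENABLE_SPELLCHECK=true \
  alfresco/search-services:2.0.3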

View File

@@ -0,0 +1,54 @@
/*--
Copyright (C) 2000-2012 Jason Hunter & Brett McLaughlin.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions, and the disclaimer that follows
these conditions in the documentation and/or other materials
provided with the distribution.
3. The name "JDOM" must not be used to endorse or promote products
derived from this software without prior written permission. For
written permission, please contact <request_AT_jdom_DOT_org>.
4. Products derived from this software may not be called "JDOM", nor
may "JDOM" appear in their name, without prior written permission
from the JDOM Project Management <request_AT_jdom_DOT_org>.
In addition, we request (but do not require) that you include in the
end-user documentation provided with the redistribution and/or in the
software itself an acknowledgement equivalent to the following:
"This product includes software developed by the
JDOM Project (http://www.jdom.org/)."
Alternatively, the acknowledgment may be graphical using the logos
available at http://www.jdom.org/images/logos.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE JDOM AUTHORS OR THE PROJECT
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
This software consists of voluntary contributions made by many
individuals on behalf of the JDOM Project and was originally
created by Jason Hunter <jhunter_AT_jdom_DOT_org> and
Brett McLaughlin <brett_AT_jdom_DOT_org>. For more information
on the JDOM Project, please see <http://www.jdom.org/>.
*/

View File

@@ -0,0 +1,39 @@
Copyright (c) 2005 Sun Microsystems, Inc.
Copyright © 2010-2014 University of Manchester
Copyright © 2010-2015 Stian Soiland-Reyes
Copyright © 2015 Peter Hull
All Rights Reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistribution of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistribution in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
Neither the name of Sun Microsystems, Inc. or the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
This software is provided "AS IS," without a warranty of any
kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
You acknowledge that this software is not designed or intended for
use in the design, construction, operation or maintenance of any
nuclear facility.

View File

@@ -0,0 +1,29 @@
BSD 3-Clause License
Copyright (c) 2020, Adobe
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,29 @@
Copyright (c) 2009, Adobe Systems Incorporated All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of Adobe Systems Incorporated, nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANT ABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
https://www.adobe.com/devnet/xmp/library/eula-xmp-library-java.html

View File

@@ -0,0 +1,20 @@
dd-plist - An open source library to parse and generate property lists
Copyright (C) 2016 Daniel Dreibrodt
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,20 @@
Copyright JS Foundation and other contributors, https://js.foundation/
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,21 @@
The MIT License
Copyright (c) 2009-2020 Jonathan Hedley <https://jsoup.org/>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,44 @@
Copyright (c) 2005-2019 Alfresco Software, Ltd. and others.
All rights reserved. This program and the accompanying materials are made available under the terms of the Alfresco
agreement located at www.alfresco.com/legal/agreements/ or other commercial agreement between Alfresco Software, Ltd.
("Alfresco") and the user of this program.
Portions of this product distribution require certain copyright or other notices to
be included with the product distribution. These notices may also appear in individual
source files.
Below is the list of licenses and modules used under corresponding licenses. Licenses for libraries used by Apache Solr are listed separately.
=== MIT ===
jquery-3.5.1.min.js https://github.com/jquery/jquery/blob/3.5.1/LICENSE.txt
=== Part of Apache Solr (Licenses listed separately) ===
angular-chosen.js
angular-cookies.js
angular-cookies.min.js
angular-resource.min.js
angular-route.js
angular-route.min.js
angular-sanitize.js
angular-sanitize.min.js
angular.js
angular.min.js
chosen.jquery.js
chosen.jquery.min.js
chosen.js
console.js
d3.js
highlight.js
jquery.ajaxfileupload.js
jquery.blockUI.js
jquery.cookie.js
jquery.form.js
jquery.jstree.js
jquery.sammy.js
jquery.timeago.js
linker.js
naturalSort.js
ngtimeago.js
order.js
ZeroClipboard.js

View File

@@ -35,9 +35,10 @@ json-20210307.jar https://github.com/stleary/JSON-java
xml-resolver-1.2.jar https://github.com/FasterXML/jackson
neethi-3.1.1.jar http://ws.apache.org/commons/neethi/
commons-dbcp-1.4.jar http://jakarta.apache.org/commons/
commons-logging-1.1.3.jar http://jakarta.apache.org/commons/
commons-logging-1.2.jar http://jakarta.apache.org/commons/
commons-lang3-3.11.jar http://jakarta.apache.org/commons/
commons-pool-1.5.4.jar http://jakarta.apache.org/commons/
commons-compress-1.21.jar https://commons.apache.org/proper/commons-compress/
chemistry-opencmis-commons-impl-1.1.0.jar http://chemistry.apache.org/
chemistry-opencmis-commons-api-1.1.0.jar http://chemistry.apache.org/
xmlschema-core-2.2.5.jar http://ws.apache.org/commons/XmlSchema/
@@ -58,39 +59,68 @@ quartz-2.3.2.jar http://quartz-scheduler.org/
jackson-core-2.12.2.jar https://github.com/FasterXML/jackson
jackson-annotations-2.12.2.jar https://github.com/FasterXML/jackson
jackson-databind-2.12.2.jar https://github.com/FasterXML/jackson
commons-httpclient-3.1-HTTPCLIENT-1265.jar http://jakarta.apache.org/commons/
spring-aop-5.3.3.jar http://projects.spring.io/spring-framework/
spring-beans-5.3.3.jar http://projects.spring.io/spring-framework/
spring-context-5.3.3.jar http://projects.spring.io/spring-framework/
spring-core-5.3.3.jar http://projects.spring.io/spring-framework/
spring-expression-5.3.3.jar http://projects.spring.io/spring-framework/
spring-jdbc-5.3.3.jar http://projects.spring.io/spring-framework/
spring-orm-5.3.3.jar http://projects.spring.io/spring-framework/
spring-tx-5.3.3.jar http://projects.spring.io/spring-framework/
xercesImpl-2.12.1.jar http://xerces.apache.org/xerces2-j
jackson-dataformat-smile-2.12.2.jar https://github.com/FasterXML/jackson
commons-httpclient-3.1.jar http://jakarta.apache.org/commons/
spring-aop-5.3.7.jar http://projects.spring.io/spring-framework/
spring-beans-5.3.7.jar http://projects.spring.io/spring-framework/
spring-context-5.3.7.jar http://projects.spring.io/spring-framework/
spring-core-5.3.7.jar http://projects.spring.io/spring-framework/
spring-expression-5.3.7.jar http://projects.spring.io/spring-framework/
spring-jdbc-5.3.7.jar http://projects.spring.io/spring-framework/
spring-orm-5.3.7.jar http://projects.spring.io/spring-framework/
spring-tx-5.3.7.jar http://projects.spring.io/spring-framework/
guessencoding-1.4.jar http://docs.codehaus.org/display/GUESSENC/
jug-2.0.0-asl.jar http://jug.safehaus.org/
acegi-security-0.8.2_patched.jar http://sourceforge.net/projects/acegisecurity/
jetty-continuation-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-deploy-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-http-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-io-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-jmx-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-rewrite-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-security-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-server-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-servlet-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-servlets-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-util-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-webapp-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-xml-9.3.29.v20201019.jar https://www.eclipse.org/jetty/licenses.html
jetty-continuation-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-deploy-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-http-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-io-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-jmx-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-rewrite-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-security-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-server-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-servlet-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-servlets-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-util-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-webapp-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
jetty-xml-9.3.30.v20211001.jar https://www.eclipse.org/jetty/licenses.html
woodstox-core-5.0.3.jar https://github.com/FasterXML/woodstox
aggdesigner-algorithm-6.0.jar https://github.com/julianhyde/aggdesigner
tika-core-1.27.jar https://github.com/apache/tika/blob/1.27/LICENSE.txt
tika-java7-1.27.jar https://github.com/apache/tika/blob/1.27/LICENSE.txt
tika-parsers-1.27.jar https://github.com/apache/tika/blob/1.27/LICENSE.txt
tika-xmp-1.27.jar https://github.com/apache/tika/blob/1.27/LICENSE.txt
jcommander-1.81.jar https://github.com/cbeust/jcommander/blob/1.81/license.txt
preflight-2.0.24.jar https://github.com/apache/pdfbox/blob/2.0.24/LICENSE.txt
xmpbox-2.0.24.jar https://github.com/apache/pdfbox/blob/2.0.24/LICENSE.txt
isoparser-1.9.41.7.jar http://www.apache.org/licenses/LICENSE-2.0.txt
metadata-extractor-2.15.0.1.jar http://www.apache.org/licenses/LICENSE-2.0.txt
hadoop-annotations-2.7.7.jar https://github.com/apache/hadoop/blob/rel/release-2.7.7/LICENSE.txt
hadoop-auth-2.7.7.jar https://github.com/apache/hadoop/blob/rel/release-2.7.7/LICENSE.txt
hadoop-common-2.7.7.jar https://github.com/apache/hadoop/blob/rel/release-2.7.7/LICENSE.txt
hadoop-hdfs-2.7.7.jar https://github.com/apache/hadoop/blob/rel/release-2.7.7/LICENSE.txt
hadoop-annotations-2.10.1.jar https://github.com/apache/hadoop/blob/rel/release-2.10.1/LICENSE.txt
hadoop-auth-2.10.1.jar https://github.com/apache/hadoop/blob/rel/release-2.10.1/LICENSE.txt
hadoop-common-2.10.1.jar https://github.com/apache/hadoop/blob/rel/release-2.10.1/LICENSE.txt
hadoop-hdfs-2.10.1.jar https://github.com/apache/hadoop/blob/rel/release-2.10.1/LICENSE.txt
hadoop-hdfs-client-2.10.1.jar https://github.com/apache/hadoop/blob/rel/release-2.10.1/LICENSE.txt
org.restlet-2.3.12.jar https://github.com/restlet/restlet-framework-java/blob/2.3.12/README.md
org.restlet.ext.servlet-2.3.12.jar https://github.com/restlet/restlet-framework-java/blob/2.3.12/README.md
xercesImpl-2.11.0-alfresco-patched-20180402.jar http://www.apache.org/licenses/LICENSE-2.0.txt
xml-apis-1.4.01.jar http://www.apache.org/licenses/LICENSE-2.0.txt
simple-xml-safe-2.7.1.jar https://github.com/carrotsearch/simplexml-safe/blob/release/2.7.1/LICENSE.txt
reload4j-1.2.18.3.jar http://www.apache.org/licenses/LICENSE-2.0.txt
jackson-core-asl-1.9.14-atlassian-6.jar http://www.apache.org/licenses/LICENSE-2.0.txt
jackson-mapper-asl-1.9.14-atlassian-6.jar http://www.apache.org/licenses/LICENSE-2.0.txt
=== Creative Commons Public Domain ===
jcip-annotations-1.0.jar https://jcip.net/listings.html
=== CDDL 1.1 ===
jaxb-impl-2.3.3.jar http://jaxb.java.net/
jaxb-runtime-2.3.3.jar http://jaxb.java.net/
jaxb-xjc-2.3.3.jar http://jaxb.java.net/
@@ -98,9 +128,14 @@ jaxb-xjc-2.3.3.jar http://jaxb.java.net/
jakarta.activation-1.2.2.jar https://eclipse-ee4j.github.io/jaf
jakarta.activation-api-1.2.2.jar https://eclipse-ee4j.github.io/jaf
jakarta.jws-api-2.1.0.jar https://projects.eclipse.org/projects/ee4j.websocket/releases/1.1.1
jakarta.xml.bind-api-2.3.3.jar https://projects.eclipse.org/projects/ee4j.jaxb
jakarta.xml.bind-api-3.0.1.jar https://projects.eclipse.org/projects/ee4j.jaxb
istack-commons-runtime-3.0.11.jar https://projects.eclipse.org/projects/ee4j.jaxb-impl
txw2-2.3.3.jar https://eclipse-ee4j.github.io/jaxb-ri
jaxb-runtime-2.3.3.jar https://github.com/eclipse-ee4j/jaxb-ri/blob/2.3.4-RI/LICENSE.md
=== Eclipse Public License 1.0 ===
=== Eclipse Public License 2.0 ===
jakarta.annotation-api-1.3.5.jar https://projects.eclipse.org/projects/ee4j.ca
@@ -110,6 +145,12 @@ jakarta.transaction-api-1.3.3.jar https://projects.eclipse.org/projects/ee4j.jta
=== BSD ===
jibx-run-1.3.3.jar http://jibx.sourceforge.net/
dom4j-2.1.3.jar https://github.com/dom4j/dom4j/blob/master/LICENSE
xmpcore-6.1.11.jar https://github.com/adobe/XMP-Toolkit-SDK/blob/main/LICENSE
xmpcore-shaded-6.1.11.jar https://github.com/tballison/xmpcore-shaded/blob/6.1.11/src/main/resources/META-INF/LICENSE
=== MIT ===
dd-plist-1.23.jar https://github.com/3breadt/dd-plist/blob/dd-plist-1.23/LICENSE.txt
=== Part of Apache Solr (Licenses listed separately) ===
@@ -120,7 +161,6 @@ asm-5.1.jar
asm-commons-5.1.jar
aspectjrt-1.8.0.jar
attributes-binder-1.3.1.jar
avatica-core-1.9.0.jar
boilerpipe-1.1.0.jar
caffeine-2.4.0.jar
calcite-core-1.13.0.jar
@@ -134,7 +174,6 @@ commons-codec-1.10.jar
commons-collections-3.2.2.jar
commons-collections4-4.1.jar
commons-compiler-2.7.6.jar
commons-compress-1.14.jar
commons-configuration-1.6.jar
commons-exec-1.3.jar
commons-fileupload-1.3.3.jar
@@ -149,17 +188,12 @@ eigenbase-properties-1.1.5.jar
fontbox-2.0.6.jar
gmetric4j-1.0.7.jar
guava-14.0.1.jar
hadoop-annotations-2.7.4.jar
hadoop-auth-2.7.4.jar
hadoop-common-2.7.4.jar
hadoop-hdfs-2.7.4.jar
hppc-0.7.1.jar
htrace-core-3.2.0-incubating.jar
httpclient-4.4.1.jar
httpclient-4.5.13.jar
httpcore-4.4.1.jar
httpmime-4.4.1.jar
icu4j-56.1.jar
isoparser-1.1.18.jar
jackcess-2.1.8.jar
janino-2.7.6.jar
java-libpst-0.8.1.jar
@@ -173,30 +207,28 @@ jsonic-1.2.7.jar
jul-to-slf4j-1.7.7.jar
juniversalchardet-1.0.3.jar
langdetect-1.1-20120112.jar
log4j-1.2.17.jar
lucene-analyzers-common-6.6.5-patched.6.jar
lucene-analyzers-icu-6.6.5-patched.6.jar
lucene-analyzers-kuromoji-6.6.5-patched.6.jar
lucene-analyzers-morfologik-6.6.5-patched.6.jar
lucene-analyzers-phonetic-6.6.5-patched.6.jar
lucene-analyzers-smartcn-6.6.5-patched.6.jar
lucene-analyzers-stempel-6.6.5-patched.6.jar
lucene-backward-codecs-6.6.5-patched.6.jar
lucene-classification-6.6.5-patched.6.jar
lucene-codecs-6.6.5-patched.6.jar
lucene-core-6.6.5-patched.6.jar
lucene-expressions-6.6.5-patched.6.jar
lucene-grouping-6.6.5-patched.6.jar
lucene-highlighter-6.6.5-patched.6.jar
lucene-join-6.6.5-patched.6.jar
lucene-memory-6.6.5-patched.6.jar
lucene-misc-6.6.5-patched.6.jar
lucene-queries-6.6.5-patched.6.jar
lucene-queryparser-6.6.5-patched.6.jar
lucene-sandbox-6.6.5-patched.6.jar
lucene-spatial-extras-6.6.5-patched.6.jar
lucene-suggest-6.6.5-patched.6.jar
metadata-extractor-2.9.1.jar
lucene-analyzers-common-6.6.5-patched.9.jar
lucene-analyzers-icu-6.6.5-patched.9.jar
lucene-analyzers-kuromoji-6.6.5-patched.9.jar
lucene-analyzers-morfologik-6.6.5-patched.9.jar
lucene-analyzers-phonetic-6.6.5-patched.9.jar
lucene-analyzers-smartcn-6.6.5-patched.9.jar
lucene-analyzers-stempel-6.6.5-patched.9.jar
lucene-backward-codecs-6.6.5-patched.9.jar
lucene-classification-6.6.5-patched.9.jar
lucene-codecs-6.6.5-patched.9.jar
lucene-core-6.6.5-patched.9.jar
lucene-expressions-6.6.5-patched.9.jar
lucene-grouping-6.6.5-patched.9.jar
lucene-highlighter-6.6.5-patched.9.jar
lucene-join-6.6.5-patched.9.jar
lucene-memory-6.6.5-patched.9.jar
lucene-misc-6.6.5-patched.9.jar
lucene-queries-6.6.5-patched.9.jar
lucene-queryparser-6.6.5-patched.9.jar
lucene-sandbox-6.6.5-patched.9.jar
lucene-spatial-extras-6.6.5-patched.9.jar
lucene-suggest-6.6.5-patched.9.jar
metrics-core-3.2.2.jar
metrics-ganglia-3.2.2.jar
metrics-graphite-3.2.2.jar
@@ -206,35 +238,27 @@ morfologik-fsa-2.1.1.jar
morfologik-polish-2.1.1.jar
morfologik-stemming-2.1.1.jar
noggit-0.6.jar
org.restlet-2.3.0.jar
org.restlet.ext.servlet-2.3.0.jar
pdfbox-2.0.6.jar
pdfbox-tools-2.0.6.jar
poi-3.17-beta1.jar
poi-ooxml-3.17-beta1.jar
poi-ooxml-schemas-3.17-beta1.jar
poi-scratchpad-3.17-beta1.jar
protobuf-java-3.1.0.jar
protobuf-java-3.6.1.jar
rome-1.5.1.jar
simple-xml-2.7.1.jar
slf4j-api-1.7.7.jar
slf4j-log4j12-1.7.7.jar
solr-analysis-extras-6.6.5-patched.6.jar
solr-clustering-6.6.5-patched.6.jar
solr-core-6.6.5-patched.6.jar
solr-langid-6.6.5-patched.6.jar
solr-solrj-6.6.5-patched.6.jar
solr-analysis-extras-6.6.5-patched.9.jar
solr-clustering-6.6.5-patched.9.jar
solr-core-6.6.5-patched.9.jar
solr-langid-6.6.5-patched.9.jar
solr-solrj-6.6.5-patched.9.jar
spatial4j-0.6.jar
start.jar
stax2-api-3.1.4.jar
tagsoup-1.2.1.jar
t-digest-3.1.jar
tika-core-1.16.jar
tika-java7-1.16.jar
tika-parsers-1.16.jar
tika-xmp-1.16.jar
vorbis-java-core-0.8.jar
vorbis-java-tika-0.8.jar
xmpcore-5.1.2.jar
xz-1.6.jar
zookeeper-3.4.10.jar

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -4,17 +4,17 @@
<parent>
<groupId>org.alfresco</groupId>
<artifactId>alfresco-search-and-insight-parent</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
</parent>
<!-- The groupId and version are required by the maven pom extractor plugin on Bamboo - more details in this issue:
https://bitbucket.org/dehringer/bamboo-maven-pom-extractor-plugin/issues/18/groupid-not-populated-if-using-parent-pom -->
<groupId>org.alfresco</groupId>
<artifactId>alfresco-search-parent</artifactId>
<version>2.0.2</version>
<version>2.0.3</version>
<packaging>pom</packaging>
<name>Alfresco Solr Search parent</name>
<properties>
<slf4j.version>1.7.30</slf4j.version>
<slf4j.version>1.7.36</slf4j.version>
<cxf.version>3.2.14</cxf.version>
<licenseName>community</licenseName>
</properties>
@@ -71,4 +71,30 @@
<url>http://maven.restlet.talend.com</url>
</repository>
</repositories>
<dependencyManagement>
<dependencies>
<!-- Spring Framework (E2E tests work with different versions) -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>${dependency.spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${dependency.spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>${dependency.spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>${dependency.spring.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>