Mirror of https://github.com/Alfresco/SearchServices.git
Synced 2025-09-10 14:11:25 +00:00

Compare commits (415 commits)
@@ -1 +0,0 @@
* alex.mukha@alfresco.com

@@ -10,6 +10,8 @@ The official documentation for this product can be found at [Alfresco Search Ser
The official documentation for this product can be found at [Alfresco Search and Insight Engine](https://docs.alfresco.com/sie/concepts/Search-Insight-Engine-overview.html).

> ⚠ The previous master branch has been renamed to `bak-master` and is deprecated. The current master used to be called `release/V2.0.x`.

### Alfresco Search Services
@@ -1,33 +0,0 @@
# see https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
  - package-ecosystem: "maven"
    directory: "/"
    schedule:
      interval: "daily"
      time: "22:00"
      timezone: "Europe/London"

  - package-ecosystem: "docker"
    directory: "search-services/packaging/src/docker/"
    schedule:
      interval: "weekly"
      day: "saturday"
      time: "22:00"
      timezone: "Europe/London"

  - package-ecosystem: "docker"
    directory: "insight-engine/packaging/src/docker/"
    schedule:
      interval: "weekly"
      day: "saturday"
      time: "22:00"
      timezone: "Europe/London"

  - package-ecosystem: "docker"
    directory: "insight-engine/alfresco-insight-zeppelin/src/docker/"
    schedule:
      interval: "weekly"
      day: "saturday"
      time: "22:00"
      timezone: "Europe/London"
@@ -17,10 +17,10 @@ $ tree generators/app/templates/
│   ├── .env
│   ├── docker-compose-ce.yml
│   └── docker-compose-ee.yml
├── latest
├── 7.0
│   ├── .env
│   ├── docker-compose-ce.yml
│   └── docker-compose-ee.yml
├── empty
│   └── docker-compose-ee.yml
├── images
│   ├── alfresco
│   │   ├── Dockerfile

@@ -82,7 +82,7 @@ $ yo alfresco-docker-compose

## ACS Version

Currently supported ACS Versions are `latest`, `6.2` and `6.1`
Currently supported ACS Versions are `7.0`, `6.2` and `6.1`

This is the first choice to be selected when the generator is executed.

@@ -90,7 +90,7 @@ This is the first choice to be selected when the generator is executed.
? Which ACS version do you want to use?
  6.1
  6.2
❯ latest
❯ 7.0
```

## AGS Version

@@ -107,14 +107,15 @@ If you chose ACS 6.1, a prompt will allow you to use AGS.

When using Community, some different options can be combined:

* Plain HTTP (http) or TLS/SSL Mutual Authentication (https) for communication between Alfresco and SOLR
* Plain HTTP (http) or HTTPs (https) for Http Web Proxy for HTTP access to services
* Protect the access to SOLR REST API in the Http WebProxy to forbid direct access to Alfresco Web Proxy port
* Use SOLR Replication in Master/Slave mode (only when using http)
* Plain HTTP (http), Shared Secret HTTP (secret) or TLS/SSL Mutual Authentication (https) for communication between Alfresco and SOLR

>> Shared Secret is only available from ACS 7.0.1

```
? Would you like to use Alfresco enterprise or community? community
? Would you like to use HTTP or mTLS for Alfresco-SOLR communication? http
? Would you like to use HTTP, Shared Secret or mTLS for Alfresco-SOLR communication? http
? Would you like to use HTTP or HTTPs for Web Proxy? http
? Would you like to protect the access to SOLR REST API? Yes
? Would you like to use a SOLR Replication? No
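To make the flow concrete, here is a hedged end-to-end sketch; the working directory name is a placeholder and the prompt answers are only an example of picking the new Shared Secret mode:

```
# Sketch: scaffold a 7.0 Community deployment with Shared Secret Alfresco-SOLR communication
# and start it. "my-deployment" is a placeholder directory.
mkdir my-deployment && cd my-deployment
yo alfresco-docker-compose          # answer: 7.0, community, secret, http, ...
docker-compose up -d --build --force-recreate
```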
@@ -21,8 +21,8 @@ module.exports = class extends Generator {
        type: 'list',
        name: 'acsVersion',
        message: 'Which ACS version do you want to use?',
        choices: [ '6.1', '6.2', 'latest' ],
        default: 'latest'
        choices: [ '6.1', '6.2', '7.0' ],
        default: '7.0'
      },
      {
        whenFunction: response => response.acsVersion == '6.1',
@@ -41,8 +41,8 @@ module.exports = class extends Generator {
      {
        type: 'list',
        name: 'httpMode',
        message: 'Would you like to use HTTP or mTLS for Alfresco-SOLR communication?',
        choices: [ "http", "https" ],
        message: 'Would you like to use HTTP, Shared Secret or mTLS for Alfresco-SOLR communication?',
        choices: [ "http", "https", "secret" ],
        default: 'http'
      },
      {
@@ -156,7 +156,7 @@ module.exports = class extends Generator {
  // Generate boilerplate from "templates" folder
  writing() {

    // Set base template directory: 6.1, 6.2, latest
    // Set base template directory: 6.1, 6.2, 7.0
    var dockerComposeTemplateDirectory = this.props.acsVersion;

    // Docker Compose environment variables values
@@ -183,7 +183,7 @@ module.exports = class extends Generator {
          'alfresco/alfresco-content-repository-community') :
        (this.props.ags ?
          'quay.io/alfresco/alfresco-governance-repository-enterprise':
          'alfresco/alfresco-content-repository'
          'quay.io/alfresco/alfresco-content-repository'
        )
      );

@@ -192,7 +192,7 @@ module.exports = class extends Generator {
      (this.props.alfrescoVersion == 'community' ?
        (this.props.ags ?
          'alfresco/alfresco-governance-share-community' :
          'quay.io/alfresco/alfresco-share') :
          'alfresco/alfresco-share') :
        (this.props.ags ?
          'quay.io/alfresco/alfresco-governance-share-enterprise':
          'quay.io/alfresco/alfresco-share'
@@ -219,8 +219,8 @@ module.exports = class extends Generator {
      httpMode: this.props.httpMode,
      httpWebMode: this.props.httpWebMode,
      port: (this.props.httpWebMode == 'http' ? '8080' : '443'),
      secureComms: (this.props.httpMode == 'http' ? 'none' : 'https'),
      alfrescoPort: (this.props.httpMode == 'http' ? '8080' : '8443'),
      secureComms: (this.props.httpMode == 'http' ? 'none' : this.props.httpMode),
      alfrescoPort: (this.props.httpMode == 'https' ? '8443' : '8080'),
      replication: this.props.replication,
      searchSolrHost: (this.props.replication ? "solr6secondary" : "solr6"),
      searchPath: searchBasePath,
@@ -279,7 +279,7 @@ module.exports = class extends Generator {
        )
      }

      // Empty addons directories.
      // Empty addons directories
      ['alfresco', 'share'].forEach(container => {
        ['jars', 'amps'].forEach(addonType => {
          this.fs.copy(
13  e2e-test/generator-alfresco-docker-compose/generators/app/templates/7.0/.env (new executable file)
@@ -0,0 +1,13 @@
ALFRESCO_TAG=7.0.0
ALFRESCO_CE_TAG=7.0.0
SHARE_TAG=7.0.0
POSTGRES_TAG=13.1
TRANSFORM_CORE_AIO_TAG=2.3.10
TRANSFORM_ROUTER_TAG=1.3.2
SHARED_FILE_STORE_TAG=0.13.0
ACTIVE_MQ_TAG=5.16.1
DIGITAL_WORKSPACE_TAG=2.1.0-adw
ACS_NGINX_TAG=3.1.1
SEARCH_TAG=latest
ZEPPELIN_TAG=latest
ACA_TAG=2.3.0
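For orientation only: docker-compose resolves the `${...}` references in the templates from this `.env` file, and a value can be overridden from the shell for a single run (the tag below is an arbitrary example):

```
# Sketch: pin the search image tag for one run instead of using SEARCH_TAG=latest from .env
SEARCH_TAG=2.0.2 docker-compose -f docker-compose-ce.yml config | grep "image:"
```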
@@ -14,8 +14,21 @@ services:
      KEYSTORE_TYPE: JCEKS
      KEYSTORE_PASS: kT9X6oe68t <% } %>
      COMPRESS_CONTENT: "<%=gzip%>"
      SHARE_SERVICES_TAG: ${SHARE_TAG}
    mem_limit: 1800m
    depends_on:
      - postgres
    environment:
      JAVA_TOOL_OPTIONS: "
        -Dencryption.keystore.type=JCEKS
        -Dencryption.cipherAlgorithm=DESede/CBC/PKCS5Padding
        -Dencryption.keyAlgorithm=DESede
        -Dencryption.keystore.location=/usr/local/tomcat/shared/classes/alfresco/extension/keystore/keystore
        -Dmetadata-keystore.password=mp6yc0UD9e
        -Dmetadata-keystore.aliases=metadata
        -Dmetadata-keystore.metadata.password=oKIWzVdEdA
        -Dmetadata-keystore.metadata.algorithm=DESede
        "
      JAVA_OPTS : "
        -Ddb.driver=org.postgresql.Driver
        -Ddb.username=alfresco
@@ -25,20 +38,19 @@ services:
        -Dsolr.port.ssl=8983
        -Dsolr.secureComms=<%=secureComms%>
        -Dsolr.baseUrl=/solr
        -Dindex.subsystem.name=solr6
        -Dindex.subsystem.name=solr6<% if (httpMode == 'secret') { %>
        -Dsolr.sharedSecret=secret<% } %>
        -Dshare.host=localhost
        -Dalfresco.port=8080
        -Daos.baseUrlOverwrite=http://localhost:8080/alfresco/aos
        -Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
        -Ddeployment.method=DOCKER_COMPOSE

        -DlocalTransform.core-aio.url=http://transform-core-aio:8090/
        -Dalfresco-pdf-renderer.url=http://transform-core-aio:8090/
        -Djodconverter.url=http://transform-core-aio:8090/
        -Dimg.url=http://transform-core-aio:8090/
        -Dtika.url=http://transform-core-aio:8090/
        -Dtransform.misc.url=http://transform-core-aio:8090/

        -Dcsrf.filter.enabled=false
        -Dalfresco.restApi.basicAuthScheme=true
        -Xms1500m -Xmx1500m
@@ -63,6 +75,8 @@ services:
      COMPRESS_CONTENT: "<%=gzip%>"
      SEARCH_LOG_LEVEL: <%=searchLogLevel%>
    mem_limit: 1200m
    depends_on:
      - alfresco
    environment:
      #Solr needs to know how to register itself with Alfresco
      SOLR_ALFRESCO_HOST: "alfresco"
@@ -84,6 +98,9 @@ services:
      SOLR_OPTS: "
        -Dsolr.ssl.checkPeerName=false
        -Dsolr.allow.unsafe.resourceloading=true
        " <% } %> <% if (httpMode == 'secret') { %>
      SOLR_OPTS: "
        -Dalfresco.secureComms.secret=secret
        " <% } %>
    ports:
      - 8083:8983 <% if (httpMode == 'https') { %>
@@ -106,6 +123,8 @@ services:
      MASTER_HOST: solr6 <% } %>
      COMPRESS_CONTENT: "<%=gzip%>"
    mem_limit: 1200m
    depends_on:
      - alfresco
    environment:
      #Solr needs to know how to register itself with Alfresco
      SOLR_ALFRESCO_HOST: "alfresco"
@@ -127,6 +146,9 @@ services:
      SOLR_OPTS: "
        -Dsolr.ssl.checkPeerName=false
        -Dsolr.allow.unsafe.resourceloading=true
        " <% } %> <% if (httpMode == 'secret') { %>
      SOLR_OPTS: "
        -Dalfresco.secureComms.secret=secret
        " <% } %>
    ports:
      - 8084:8983 <% if (httpMode == 'https') { %>
@@ -193,6 +215,9 @@ services:
    mem_limit: 128m
    depends_on:
      - alfresco
      - share
      - solr6
      - content-app
    volumes:
      - ./config/nginx.conf:/etc/nginx/nginx.conf
      - ./config/nginx.htpasswd:/etc/nginx/conf.d/nginx.htpasswd <% if (httpWebMode == 'https') { %>
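Not part of the template itself, but useful context: with `httpMode` set to `secret`, Solr expects requests to carry the shared secret header used elsewhere in this change, so a quick manual check against the mapped port could look like this (the secret value is the hard-coded `secret` from the template above):

```
# Sketch: call the Solr REST API through the port mapped above, sending the shared secret header.
curl -s -H "X-Alfresco-Search-Secret: secret" "http://localhost:8083/solr/admin/cores?action=STATUS"
```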
@@ -13,17 +13,19 @@ services:
|
||||
KEYSTORE_TYPE: JCEKS
|
||||
KEYSTORE_PASS: kT9X6oe68t <% } %>
|
||||
COMPRESS_CONTENT: "<%=gzip%>"
|
||||
SHARE_SERVICES_TAG: ${SHARE_TAG}
|
||||
mem_limit: 1800m
|
||||
environment:
|
||||
JAVA_TOOL_OPTIONS: "
|
||||
-Dencryption.keystore.type=JCEKS
|
||||
-Dencryption.cipherAlgorithm=DESede/CBC/PKCS5Padding
|
||||
-Dencryption.keyAlgorithm=DESede
|
||||
-Dencryption.keystore.location=/usr/local/tomcat/shared/classes/alfresco/extension/keystore/keystore
|
||||
-Dmetadata-keystore.password=mp6yc0UD9e
|
||||
-Dmetadata-keystore.aliases=metadata
|
||||
-Dmetadata-keystore.metadata.password=mp6yc0UD9e
|
||||
-Dmetadata-keystore.metadata.algorithm=AES
|
||||
-Dmetadata-keystore.metadata.password=oKIWzVdEdA
|
||||
-Dmetadata-keystore.metadata.algorithm=DESede
|
||||
<% if (httpMode == 'https') { %>
|
||||
-Dencryption.keystore.type=pkcs12
|
||||
-Dencryption.cipherAlgorithm=AES/CBC/PKCS5Padding
|
||||
-Dencryption.keyAlgorithm=AES
|
||||
-Dssl-keystore.password=kT9X6oe68t
|
||||
-Dssl-keystore.aliases=ssl-alfresco-ca,ssl-repo
|
||||
-Dssl-keystore.ssl-alfresco-ca.password=kT9X6oe68t
|
||||
@@ -43,12 +45,11 @@ services:
|
||||
-Dsolr.secureComms=<%=secureComms%>
|
||||
-Dsolr.baseUrl=/solr <% if (sharding == 'true') { %>
|
||||
-Dsolr.useDynamicShardRegistration=true <% } %>
|
||||
-Dindex.subsystem.name=solr6
|
||||
|
||||
-Dindex.subsystem.name=solr6<% if (httpMode == 'secret') { %>
|
||||
-Dsolr.sharedSecret=secret<% } %>
|
||||
-Daos.baseUrlOverwrite=http://localhost:8080/alfresco/aos
|
||||
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
|
||||
-Ddeployment.method=DOCKER_COMPOSE
|
||||
|
||||
-Dtransform.service.enabled=true
|
||||
-Dtransform.service.url=http://transform-router:8095
|
||||
-Dsfs.url=http://shared-file-store:8099/
|
||||
@@ -58,7 +59,6 @@ services:
|
||||
-Dimg.url=http://transform-core-aio:8090/
|
||||
-Dtika.url=http://transform-core-aio:8090/
|
||||
-Dtransform.misc.url=http://transform-core-aio:8090/
|
||||
|
||||
-Dcsrf.filter.enabled=false
|
||||
-Dalfresco.restApi.basicAuthScheme=true
|
||||
-Xms1500m -Xmx1500m
|
||||
@@ -104,33 +104,19 @@ services:
|
||||
#Create the default alfresco and archive cores
|
||||
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
|
||||
SOLR_JAVA_MEM: "-Xms1g -Xmx1g" <% if (httpMode == 'https') { %>
|
||||
SOLR_SSL_TRUST_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.truststore"
|
||||
SOLR_SSL_TRUST_STORE: "/opt/<%=searchPath%>/keystore/ssl.repo.client.truststore"
|
||||
SOLR_SSL_TRUST_STORE_PASSWORD: "kT9X6oe68t"
|
||||
SOLR_SSL_TRUST_STORE_TYPE: "JCEKS"
|
||||
SOLR_SSL_KEY_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.keystore"
|
||||
SOLR_SSL_KEY_STORE: "/opt/<%=searchPath%>/keystore/ssl.repo.client.keystore"
|
||||
SOLR_SSL_KEY_STORE_PASSWORD: "kT9X6oe68t"
|
||||
SOLR_SSL_KEY_STORE_TYPE: "JCEKS"
|
||||
SOLR_SSL_NEED_CLIENT_AUTH: "true" <% if (sharding == 'true') { %>
|
||||
SOLR_SSL_CLIENT_KEY_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.keystore"
|
||||
SOLR_SSL_CLIENT_KEY_STORE_TYPE: "JCEKS"
|
||||
SOLR_SSL_CLIENT_TRUST_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.keystore"
|
||||
SOLR_SSL_CLIENT_TRUST_STORE_TYPE: "JCEKS" <% } %>
|
||||
JAVA_TOOL_OPTIONS: "
|
||||
-Dsolr.jetty.truststore.password=kT9X6oe68t
|
||||
-Dsolr.jetty.keystore.password=kT9X6oe68t <% if (sharding == 'true') { %>
|
||||
-Djavax.net.ssl.keyStorePassword=kT9X6oe68t
|
||||
-Djavax.net.ssl.trustStorePassword=kT9X6oe68t <% } %>
|
||||
-Dssl-keystore.password=kT9X6oe68t
|
||||
-Dssl-keystore.aliases=ssl-alfresco-ca,ssl-repo-client
|
||||
-Dssl-keystore.ssl-alfresco-ca.password=kT9X6oe68t
|
||||
-Dssl-keystore.ssl-repo-client.password=kT9X6oe68t
|
||||
-Dssl-truststore.password=kT9X6oe68t
|
||||
-Dssl-truststore.aliases=ssl-alfresco-ca,ssl-repo,ssl-repo-client
|
||||
-Dssl-truststore.ssl-alfresco-ca.password=kT9X6oe68t
|
||||
-Dssl-truststore.ssl-repo.password=kT9X6oe68t
|
||||
-Dssl-truststore.ssl-repo-client.password=kT9X6oe68t
|
||||
"
|
||||
SOLR_SSL_NEED_CLIENT_AUTH: "true"
|
||||
SOLR_OPTS: "
|
||||
-Dsolr.ssl.checkPeerName=false
|
||||
-Dsolr.allow.unsafe.resourceloading=true
|
||||
" <% } %> <% if (httpMode == 'secret') { %>
|
||||
SOLR_OPTS: "
|
||||
-Dalfresco.secureComms.secret=secret
|
||||
" <% } %>
|
||||
ports:
|
||||
- 8083:8983 <% if (httpMode == 'https') { %>
|
||||
@@ -169,41 +155,27 @@ services:
|
||||
SOLR_ALFRESCO_PORT: "<%=alfrescoPort%>"
|
||||
ALFRESCO_SECURE_COMMS: <%=secureComms%>
|
||||
#Alfresco needs to know how to call solr
|
||||
SOLR_SOLR_HOST: "solr6secondary"
|
||||
SOLR_SOLR_HOST: "solr6"
|
||||
SOLR_SOLR_PORT: "8983"
|
||||
#Create the default alfresco and archive cores
|
||||
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
|
||||
SOLR_JAVA_MEM: "-Xms1g -Xmx1g" <% if (httpMode == 'https') { %>
|
||||
SOLR_SSL_TRUST_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.truststore"
|
||||
SOLR_SSL_TRUST_STORE: "/opt/<%=searchPath%>/keystore/ssl.repo.client.truststore"
|
||||
SOLR_SSL_TRUST_STORE_PASSWORD: "kT9X6oe68t"
|
||||
SOLR_SSL_TRUST_STORE_TYPE: "JCEKS"
|
||||
SOLR_SSL_KEY_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.keystore"
|
||||
SOLR_SSL_KEY_STORE: "/opt/<%=searchPath%>/keystore/ssl.repo.client.keystore"
|
||||
SOLR_SSL_KEY_STORE_PASSWORD: "kT9X6oe68t"
|
||||
SOLR_SSL_KEY_STORE_TYPE: "JCEKS"
|
||||
SOLR_SSL_NEED_CLIENT_AUTH: "true" <% if (sharding == 'true') { %>
|
||||
SOLR_SSL_CLIENT_KEY_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.keystore"
|
||||
SOLR_SSL_CLIENT_KEY_STORE_TYPE: "JCEKS"
|
||||
SOLR_SSL_CLIENT_TRUST_STORE: "/opt/<%=searchPath%>/keystore/ssl-repo-client.keystore"
|
||||
SOLR_SSL_CLIENT_TRUST_STORE_TYPE: "JCEKS" <% } %>
|
||||
JAVA_TOOL_OPTIONS: "
|
||||
-Dsolr.jetty.truststore.password=kT9X6oe68t
|
||||
-Dsolr.jetty.keystore.password=kT9X6oe68t <% if (sharding == 'true') { %>
|
||||
-Djavax.net.ssl.keyStorePassword=kT9X6oe68t
|
||||
-Djavax.net.ssl.trustStorePassword=kT9X6oe68t <% } %>
|
||||
-Dssl-keystore.password=kT9X6oe68t
|
||||
-Dssl-keystore.aliases=ssl-alfresco-ca,ssl-repo-client
|
||||
-Dssl-keystore.ssl-alfresco-ca.password=kT9X6oe68t
|
||||
-Dssl-keystore.ssl-repo-client.password=kT9X6oe68t
|
||||
-Dssl-truststore.password=kT9X6oe68t
|
||||
-Dssl-truststore.aliases=ssl-alfresco-ca,ssl-repo,ssl-repo-client
|
||||
-Dssl-truststore.ssl-alfresco-ca.password=kT9X6oe68t
|
||||
-Dssl-truststore.ssl-repo.password=kT9X6oe68t
|
||||
-Dssl-truststore.ssl-repo-client.password=kT9X6oe68t
|
||||
"
|
||||
SOLR_SSL_NEED_CLIENT_AUTH: "true"
|
||||
SOLR_OPTS: "
|
||||
-Dsolr.ssl.checkPeerName=false
|
||||
-Dsolr.allow.unsafe.resourceloading=true
|
||||
" <% } %> <% if (httpMode == 'secret') { %>
|
||||
SOLR_OPTS: "
|
||||
-Dalfresco.secureComms.secret=secret
|
||||
" <% } %>
|
||||
ports:
|
||||
- 8084:8983 <% if (httpMode == 'https') { %>
|
||||
- 8083:8983 <% if (httpMode == 'https') { %>
|
||||
volumes:
|
||||
- ./keystores/solr:/opt/<%=searchPath%>/keystore <% } %>
|
||||
<% } %>
|
@@ -25,6 +25,19 @@ VOLUME ["${ALF_DATA_DIR}/keystore"]

USER root

ARG SHARE_SERVICES_TAG
ENV SHARE_SERVICES_TAG $SHARE_SERVICES_TAG

# From Alfresco Repository 7.0.0 Share Services AMP is not provided with default Docker Image
RUN if [ ! -z "$SHARE_SERVICES_TAG" ] ; then \
      yum -y update && \
      yum -y install wget && \
      yum clean all && \
      mkdir -p $TOMCAT_DIR/amps && \
      wget https://nexus.alfresco.com/nexus/service/local/repo_groups/public/content/org/alfresco/alfresco-share-services/${SHARE_SERVICES_TAG}/alfresco-share-services-${SHARE_SERVICES_TAG}.amp \
        -O $TOMCAT_DIR/amps/alfresco-share-services-${SHARE_SERVICES_TAG}.amp; \
    fi

# Install modules and addons
RUN mkdir -p $TOMCAT_DIR/amps
COPY modules/amps/* $TOMCAT_DIR/amps/
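As a hedged illustration of how the new build argument is meant to be used (the image tag and build context are placeholders; the argument name comes from the Dockerfile above):

```
# Sketch: bake the Share Services AMP into the repository image by passing the build arg.
docker build --build-arg SHARE_SERVICES_TAG=7.0.0 -t local/alfresco-with-share-services ./images/alfresco
```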
@@ -21,6 +21,9 @@ ENV ALFRESCO_COMMS $ALFRESCO_COMMS
RUN if [ "$ALFRESCO_COMMS" == "https" ] ; then \
      sed -i '/^bash.*/i sed -i "'"s/alfresco.secureComms=none/alfresco.secureComms=https/g"'" ${DIST_DIR}/solrhome/templates/rerank/conf/solrcore.properties\n' \
        ${DIST_DIR}/solr/bin/search_config_setup.sh; \
    elif [ "$ALFRESCO_COMMS" == "secret" ] ; then \
      sed -i '/^bash.*/i sed -i "'"s/alfresco.secureComms=https/alfresco.secureComms=secret/g"'" ${DIST_DIR}/solrhome/templates/rerank/conf/solrcore.properties\n' \
        ${DIST_DIR}/solr/bin/search_config_setup.sh; \
    else \
      sed -i '/^bash.*/i sed -i "'"s/alfresco.secureComms=https/alfresco.secureComms=none/g"'" ${DIST_DIR}/solrhome/templates/rerank/conf/solrcore.properties\n' \
        ${DIST_DIR}/solr/bin/search_config_setup.sh; \
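For context: the sed calls above only rewrite `alfresco.secureComms` in the rerank template's `solrcore.properties`, so the selected mode can be verified inside a running container; the container name and core path below are assumptions for illustration:

```
# Sketch: confirm which secure comms mode the search container was built and started with.
# "search" and the core conf path are placeholders for your actual container name and core.
docker exec search grep "alfresco.secureComms" \
  /opt/alfresco-search-services/solrhome/alfresco/conf/solrcore.properties
```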
@@ -1,14 +0,0 @@
ALFRESCO_TAG=6.3.0-A10
ALFRESCO_CE_TAG=latest
SHARE_TAG=latest
POSTGRES_TAG=11.4
TRANSFORM_ROUTER_TAG=1.2.0
TRANSFORM_CORE_AIO_TAG=2.2.1
SHARED_FILE_STORE_TAG=0.7.0
ACTIVE_MQ_TAG=5.15.8
DIGITAL_WORKSPACE_TAG=1.5.0
ACS_NGINX_TAG=3.0.1
ACS_COMMUNITY_NGINX_TAG=1.0.0
SEARCH_TAG=latest
ZEPPELIN_TAG=latest
ACA_TAG=master-latest

6733  e2e-test/generator-alfresco-docker-compose/package-lock.json (generated; file diff suppressed because it is too large)
@@ -21,10 +21,9 @@
    "npm": ">= 4.0.0"
  },
  "dependencies": {
    "chalk": "^2.1.0",
    "yeoman-generator": "^2.0.1",
    "yo": "^3.1.1",
    "yosay": "^2.0.1"
    "chalk": "^2.4.2",
    "yeoman-generator": "^5.8.0",
    "yosay": "^2.0.2"
  },
  "jest": {
    "testEnvironment": "node"
20  e2e-test/helpers/start-alfresco.sh (new executable file)
@@ -0,0 +1,20 @@
#!/usr/bin/env bash
set -eux

# Start Alfresco and Solr.

# The location for the docker-compose files.
DOCKER_RESOURCE_FOLDER=$1
# The search docker image.
SEARCH_IMAGE=$2

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

export DOCKER_CLIENT_TIMEOUT=120
export COMPOSE_HTTP_TIMEOUT=120

# Build the images and call docker-compose.
cd "$DOCKER_RESOURCE_FOLDER"
docker-compose up -d --build --force-recreate

$SCRIPT_DIR/wait-service-to-start.sh
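A usage sketch for the helper above; both arguments are positional and the values shown are placeholders:

```
# Sketch: bring up a generated docker-compose environment and wait for Alfresco and Solr.
./e2e-test/helpers/start-alfresco.sh ./generated-compose quay.io/alfresco/search-services:latest
```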
37  e2e-test/helpers/wait-service-to-start.sh (new executable file)
@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -e # exit if a command fails
set -x # trace what gets executed

WAIT_INTERVAL=1
COUNTER=0
TIMEOUT=2000
t0=`date +%s`

declare -a endpoints=("${1:-http://localhost:8081/alfresco/}" "${1:-http://localhost:8083/solr/}")

for endpoint in "${endpoints[@]}"
do

  echo "Waiting for Service to start using endpoint: ${endpoint}"

  additional_args=()
  if [[ $endpoint == *"solr"* ]]; then
    additional_args+=(-H "X-Alfresco-Search-Secret: secret")
  fi

  until [[ "$(curl --output /dev/null -w ''%{http_code}'' "${additional_args[@]}" --silent --head --fail ${endpoint})" == 200 ]] || [ "$COUNTER" -eq "$TIMEOUT" ]; do
    printf '.'
    sleep $WAIT_INTERVAL
    COUNTER=$(($COUNTER+$WAIT_INTERVAL))
  done

  if (("$COUNTER" < "$TIMEOUT")) ; then
    t1=`date +%s`
    delta=$((($t1 - $t0)/60))
    echo "Service ${endpoint} Started in $delta minutes"
  else
    echo "Service ${endpoint} could not start in time."
    echo "Waited $COUNTER seconds"
    exit 1
  fi
done
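For orientation: the optional first argument replaces both default endpoints, so the script can also be pointed at a single non-default service (the URL below is an example):

```
# Sketch: wait only for a repository exposed on port 8080 instead of the defaults.
./e2e-test/helpers/wait-service-to-start.sh http://localhost:8080/alfresco/
```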
@@ -3,21 +3,22 @@
  <parent>
    <groupId>org.alfresco</groupId>
    <artifactId>alfresco-search-and-insight-parent</artifactId>
    <version>2.0.0-SNAPSHOT</version>
    <version>2.0.9</version>
  </parent>
  <groupId>search-analytics-e2e-test</groupId>
  <groupId>org.alfresco</groupId>
  <artifactId>search-analytics-e2e-test</artifactId>
  <name>Search Analytics E2E Tests</name>
  <description>Test Project to test Search Service and Analytics Features on a complete setup of Alfresco, Share</description>
  <properties>
    <tas.rest.api.version>1.42</tas.rest.api.version>
    <tas.cmis.api.version>1.13</tas.cmis.api.version>
    <tas.utility.version>3.0.26</tas.utility.version>
    <tas.rest.api.version>23.1.0.168</tas.rest.api.version>
    <tas.cmis.api.version>23.1.0.101</tas.cmis.api.version>
    <tas.utility.version>4.0.4</tas.utility.version>
    <rm.version>3.3.1</rm.version>
    <suiteXmlFile>src/test/resources/SearchSuite.xml</suiteXmlFile>
    <test.exclude />
    <test.include />
    <jackson.databind.version>2.7.7</jackson.databind.version>
    <dependency.google.guava.version>23.0</dependency.google.guava.version>
    <jackson.databind.version>2.9.10.8</jackson.databind.version>
    <licenseName>community</licenseName>
  </properties>
  <build>
@@ -32,8 +33,21 @@
          <excludedGroups>${test.exclude}</excludedGroups>
          <groups>${test.include}</groups>
          <redirectTestOutputToFile>true</redirectTestOutputToFile>
          <argLine>
            --add-opens=java.base/java.lang=ALL-UNNAMED
          </argLine>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>license-maven-plugin</artifactId>
        <executions>
          <execution>
            <id>third-party-licenses</id>
            <phase>none</phase>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
  <dependencies>
@@ -66,6 +80,10 @@
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
    </dependency>
    <dependency>
      <groupId>org.alfresco</groupId>
      <artifactId>alfresco-governance-services-automation-enterprise-rest-api</artifactId>
@@ -81,7 +99,7 @@
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
      <version>${jackson.databind.version}</version>
      <version>${dependency.jackson.version}</version>
    </dependency>
    <dependency>
      <groupId>org.alfresco</groupId>
@@ -119,7 +137,7 @@
    <dependency>
      <groupId>org.projectlombok</groupId>
      <artifactId>lombok</artifactId>
      <version>1.18.12</version>
      <version>1.18.30</version>
      <scope>test</scope>
    </dependency>
    <dependency>
@@ -128,4 +146,57 @@
      <version>12-ea+10</version>
    </dependency>
  </dependencies>
</project>

  <repositories>
    <repository>
      <id>alfresco-enterprise-releases</id>
      <url>https://artifacts.alfresco.com/nexus/content/repositories/enterprise-releases</url>
      <releases>
        <enabled>true</enabled>
      </releases>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
    </repository>
    <repository>
      <id>alfresco-enterprise-snapshots</id>
      <url>https://artifacts.alfresco.com/nexus/content/repositories/enterprise-snapshots</url>
      <releases>
        <enabled>false</enabled>
      </releases>
      <snapshots>
        <enabled>true</enabled>
      </snapshots>
    </repository>
    <repository>
      <id>alfresco-public-releases</id>
      <url>https://artifacts.alfresco.com/nexus/content/groups/public</url>
      <releases>
        <enabled>true</enabled>
      </releases>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
    </repository>
    <repository>
      <id>alfresco-public-snapshots</id>
      <url>https://artifacts.alfresco.com/nexus/content/groups/public-snapshots</url>
      <releases>
        <enabled>false</enabled>
      </releases>
      <snapshots>
        <enabled>true</enabled>
      </snapshots>
    </repository>
    <repository>
      <id>alfresco-hotfix</id>
      <url>https://artifacts.alfresco.com/nexus/content/groups/hotfix</url>
      <releases>
        <enabled>true</enabled>
      </releases>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
    </repository>
  </repositories>
</project>
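For context (not part of the pom diff): the `suiteXmlFile`, `test.include` and `test.exclude` properties above are the usual knobs for narrowing a run; a hedged invocation might look like this, with the group name as a placeholder:

```
# Sketch: run the E2E suite with the default suite file but only one TestNG group.
mvn clean test -DsuiteXmlFile=src/test/resources/SearchSuite.xml -Dtest.include=search   # "search" is a placeholder group
```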
46  e2e-test/python-generator/README.md (new file)
@@ -0,0 +1,46 @@
# About
A python script to generate docker-compose files suitable for use with the automated tests in
the [Search and Insight E2E tests](https://git.alfresco.com/search_discovery/insightengine/tree/master/e2e-test).

# Installation
The script uses Python 3, which can be installed from [python.org](https://www.python.org/downloads/) or using
a package manager. You will also need the [yaml library](https://pypi.org/project/PyYAML/), which can be installed
using Pip if it is not already present:
```
pip3 install pyyaml
```

# Using the script
The script provides some help with the `-h` option:

```
python3 BuildScripts/generator/generator.py -h
usage: generator.py [-h] [-a ALFRESCO]...
...
```

For use with ACS 6.2.x the legacy transformers can be included:
```
python3 BuildScripts/generator/generator.py --alfresco=alfresco/alfresco-content-repository:6.2.0 --transformer=AIOTransformers
```

For ACS 6.0.x and ACS 6.1.x no external transformer is needed:
```
python3 BuildScripts/generator/generator.py --alfresco=alfresco/alfresco-content-repository:6.1.0
```

For ACS 6.0.0.x and earlier ActiveMQ can be excluded:
```
python3 BuildScripts/generator/generator.py --alfresco=alfresco/alfresco-content-repository:6.0.0.3 --excludeAMQ
```

For ACS 5.2.x the legacy LibreOffice transformer can be used:
```
python3 BuildScripts/generator/generator.py --alfresco=quay.io/alfresco/alfresco-content-repository-52:5.2.5 --postgres=postgres:9.4 --excludeAMQ --transformer=LibreOffice
```

# Starting the containers
To start the containers you also need to build the images - for example:
```
docker-compose up --build --force-recreate
```
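The script (listed below) also exposes flags for sharding, master/slave replication and the Alfresco-SOLR communication mode; a hedged example combining several of them, with the repository tag and output directory as placeholders:

```
# Sketch: two DB_ID_RANGE shards talking to Alfresco over the shared secret, written to ./generated-compose
python3 BuildScripts/generator/generator.py \
    --alfresco=alfresco/alfresco-content-repository:7.0.0 \
    --transformer=AIOTransformers \
    --sharding=DB_ID_RANGE --shardCount=2 \
    --communication=secret \
    --output=./generated-compose
```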
356  e2e-test/python-generator/generator.py (new file)
@@ -0,0 +1,356 @@
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import argparse
|
||||
from string import Template
|
||||
import yaml
|
||||
import os
|
||||
from distutils.dir_util import copy_tree
|
||||
|
||||
LIBRE_OFFICE = 'LibreOffice'
|
||||
AIO_TRANSFORMERS = 'AIOTransformers'
|
||||
|
||||
AMQ_OPTS = '-Dmessaging.broker.url="failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true"'
|
||||
TRANSFORM_OPTS = ('-DlocalTransform.core-aio.url=http://transform-core-aio:8090/ '
|
||||
'-Dalfresco-pdf-renderer.url=http://transform-core-aio:8090/ '
|
||||
'-Djodconverter.url=http://transform-core-aio:8090/ '
|
||||
'-Dimg.url=http://transform-core-aio:8090/ '
|
||||
'-Dtika.url=http://transform-core-aio:8090/ '
|
||||
'-Dtransform.misc.url=http://transform-core-aio:8090/')
|
||||
SHARE_TRANSFORM_OPTS = ('-DlocalTransform.pdfrenderer.url=http://alfresco-pdf-renderer:8090/ '
|
||||
'-Dalfresco-pdf-renderer.url=http://alfresco-pdf-renderer:8090/ '
|
||||
'-DlocalTransform.imagemagick.url=http://imagemagick:8090/ -Dimg.url=http://imagemagick:8090/')
|
||||
SHARDING_OPTS = '-Dsolr.useDynamicShardRegistration=true'
|
||||
JAVA_OPTS = ('-Ddb.driver=org.postgresql.Driver -Ddb.username=alfresco -Ddb.password=alfresco '
|
||||
'-Ddb.url=jdbc:postgresql://postgres:5432/alfresco -Dsolr.port=8983 '
|
||||
'-Dsystem.acl.maxPermissionCheckEnabled=true '
|
||||
'-Dindex.subsystem.name=solr6 '
|
||||
'-Dalfresco.restApi.basicAuthScheme=true '
|
||||
# longer timeouts for CI
|
||||
'-Dsolr.http.socket.timeout=30000 '
|
||||
'-Dsolr.http.connection.timeout=3000 ')
|
||||
MTLS_OPTS = ('-Dsolr.port.ssl=8983 -Dsolr.secureComms=https ')
|
||||
HTTP_OPTS = ('-Dsolr.secureComms=none')
|
||||
SECRET_OPTS = ('-Dsolr.secureComms=secret -Dsolr.sharedSecret=secret')
|
||||
JAVA_TOOL_OPTIONS = ('-Dencryption.keystore.type=JCEKS '
|
||||
'-Dencryption.cipherAlgorithm=DESede/CBC/PKCS5Padding '
|
||||
'-Dencryption.keyAlgorithm=DESede '
|
||||
'-Dencryption.keystore.location=/usr/local/tomcat/shared/classes/alfresco/extension/keystore/keystore '
|
||||
'-Dmetadata-keystore.password=mp6yc0UD9e '
|
||||
'-Dmetadata-keystore.aliases=metadata '
|
||||
'-Dmetadata-keystore.metadata.password=oKIWzVdEdA '
|
||||
'-Dmetadata-keystore.metadata.algorithm=DESede')
|
||||
MTLS_JAVA_TOOL_OPTIONS = ('-Dencryption.keystore.type=pkcs12 -Dencryption.cipherAlgorithm=AES/CBC/PKCS5Padding '
|
||||
'-Dencryption.keyAlgorithm=DESede '
|
||||
'-Dssl-truststore.password=kT9X6oe68t -Dssl-keystore.password=kT9X6oe68t '
|
||||
'-Dssl-keystore.aliases=ssl-alfresco-ca,ssl-repo '
|
||||
'-Dssl-keystore.ssl-alfresco-ca.password=kT9X6oe68t '
|
||||
'-Dssl-keystore.ssl-repo.password=kT9X6oe68t '
|
||||
'-Dssl-truststore.aliases=alfresco-ca,ssl-repo-client '
|
||||
'-Dssl-truststore.alfresco-ca.password=kT9X6oe68t '
|
||||
'-Dssl-truststore.ssl-repo-client.password=kT9X6oe68t')
|
||||
|
||||
def getJavaOpts(includeAMQ, includeTransform, includeShare, solrHost, solrBaseUrl, sharding, communication):
|
||||
|
||||
solrHost = '-Dsolr.host=' + solrHost
|
||||
shardingOpts = (SHARDING_OPTS if sharding != None else '')
|
||||
amqOpts = (AMQ_OPTS if includeAMQ else '')
|
||||
transformOpts = (TRANSFORM_OPTS if includeTransform else '')
|
||||
solrBaseUrlOpts = '-Dsolr.baseUrl=' + solrBaseUrl
|
||||
shareTransformOpts = (SHARE_TRANSFORM_OPTS if includeShare and includeTransform else '')
|
||||
if communication == 'mtls':
|
||||
commOpts = MTLS_OPTS
|
||||
elif communication == 'none':
|
||||
commOpts = HTTP_OPTS
|
||||
else :
|
||||
commOpts = SECRET_OPTS
|
||||
|
||||
return ' '.join([JAVA_OPTS, amqOpts, transformOpts, shareTransformOpts, solrHost, solrBaseUrlOpts, shardingOpts, commOpts])
|
||||
|
||||
def getJavaToolOptions(communication):
|
||||
|
||||
mtlsJavaToolOptions = (MTLS_JAVA_TOOL_OPTIONS if communication == 'mtls' else '')
|
||||
|
||||
return ' '.join([JAVA_TOOL_OPTIONS, mtlsJavaToolOptions])
|
||||
|
||||
def deleteServices(dcYaml, *services):
|
||||
for service in services:
|
||||
if service in dcYaml['services'].keys():
|
||||
del(dcYaml['services'][service])
|
||||
|
||||
def getExtraEnvironmentVars(serviceName, replicationType):
|
||||
"""Return a dict of environment variables to add to the search container declaration in docker-compose.yml."""
|
||||
extraEnvironmentVars = {}
|
||||
if replicationType == 'master':
|
||||
extraEnvironmentVars['REPLICATION_TYPE'] = 'master'
|
||||
elif replicationType == 'slave':
|
||||
extraEnvironmentVars.update({'REPLICATION_TYPE': 'slave',
|
||||
'REPLICATION_MASTER_HOST': serviceName.replace('slave', 'master'),
|
||||
'REPLICATION_MASTER_PORT': '8983',
|
||||
'REPLICATION_POLL_INTERVAL': '00:00:10'})
|
||||
return extraEnvironmentVars
|
||||
|
||||
def getSolrcoreConfig(sharding, shardId, shardCount, shardRange):
|
||||
"""Returns a list of properties to add to the end of the solrcore.properties file."""
|
||||
solrcoreConfig = []
|
||||
if sharding != None:
|
||||
solrcoreConfig.append('solr.port.ssl=8983')
|
||||
solrcoreConfig.append('shard.instance={}'.format(shardId))
|
||||
solrcoreConfig.append('alfresco.port=8080')
|
||||
solrcoreConfig.append('alfresco.port.ssl=8443')
|
||||
solrcoreConfig.append('alfresco.baseUrl=/alfresco')
|
||||
if sharding not in ['DB_ID_RANGE', 'EXPLICIT_ID_FALLBACK_LRIS']:
|
||||
solrcoreConfig.append('shard.count={}'.format(shardCount))
|
||||
if sharding == 'DB_ID_RANGE':
|
||||
# The first shards each contain 800 nodes by default and the last continues the range to id 100000.
|
||||
nodesPerShard = shardRange
|
||||
rangeStart = shardId * nodesPerShard
|
||||
rangeEnd = (rangeStart + nodesPerShard - 1 if shardId < shardCount - 1 else 100000)
|
||||
solrcoreConfig.append('shard.range={}-{}'.format(rangeStart, rangeEnd))
|
||||
if sharding == 'DATE':
|
||||
solrcoreConfig.append('shard.key=cm:created')
|
||||
solrcoreConfig.append('shard.date.grouping={}'.format(12 // shardCount))
|
||||
if sharding in ['PROPERTY', 'EXPLICIT_ID', 'EXPLICIT_ID_FALLBACK_LRIS']:
|
||||
solrcoreConfig.append('shard.key=shard:shardId')
|
||||
print("SolrConfig for Shard: ", shardId, " : ", solrcoreConfig)
|
||||
return solrcoreConfig
|
||||
|
||||
def getSolrcoreReplacements(sharding, communication, fingerprint):
|
||||
"""Returns a dict of replacements to make in the solrcore.properties file."""
|
||||
solrcoreReplacements = {}
|
||||
if fingerprint == 'true':
|
||||
solrcoreReplacements['alfresco.fingerprint=false'] = 'alfresco.fingerprint=true'
|
||||
if sharding != None:
|
||||
solrcoreReplacements['shard.method=DB_ID'] = 'shard.method={}'.format(sharding)
|
||||
if communication == 'mtls':
|
||||
solrcoreReplacements['alfresco.secureComms=none'] = 'alfresco.secureComms=https'
|
||||
solrcoreReplacements['alfresco.encryption.ssl.keystore.location=.*'] = 'alfresco.encryption.ssl.keystore.location=\\\\\\/opt\\\\\\/alfresco-search-services\\\\\\/keystore\\\\\\/ssl-repo-client.keystore'
|
||||
solrcoreReplacements['alfresco.encryption.ssl.keystore.type=.*'] = 'alfresco.encryption.ssl.keystore.type=JCEKS'
|
||||
solrcoreReplacements['alfresco.encryption.ssl.truststore.location=.*'] = 'alfresco.encryption.ssl.truststore.location=\\\\\\/opt\\\\\\/alfresco-search-services\\\\\\/keystore\\\\\\/ssl-repo-client.truststore'
|
||||
solrcoreReplacements['alfresco.encryption.ssl.truststore.type=.*'] = 'alfresco.encryption.ssl.truststore.type=JCEKS'
|
||||
elif communication == 'none':
|
||||
solrcoreReplacements['alfresco.secureComms=https'] = r'alfresco.secureComms=none\\\\\\\nalfresco.allowUnauthenticatedSolrEndpoint=true'
|
||||
else :
|
||||
solrcoreReplacements['alfresco.secureComms=https'] = 'alfresco.secureComms=secret'
|
||||
return solrcoreReplacements
|
||||
|
||||
def addAlfrescoMtlsConfig(alfrescoArgsNode):
|
||||
"""Add a list of environment values in Docker Compose Alfresco Service for mTLS."""
|
||||
alfrescoArgsNode['TRUSTSTORE_TYPE'] = 'JCEKS'
|
||||
alfrescoArgsNode['TRUSTSTORE_PASS'] = 'kT9X6oe68t'
|
||||
alfrescoArgsNode['KEYSTORE_TYPE'] = 'JCEKS'
|
||||
alfrescoArgsNode['KEYSTORE_PASS'] = 'kT9X6oe68t'
|
||||
alfrescoArgsNode['SOLR_COMMS'] = 'https'
|
||||
|
||||
def addAlfrescoVolumes(alfrescoNode):
|
||||
"""Add route to keystores folder"""
|
||||
alfrescoNode['volumes'] = ['./keystores/alfresco:/usr/local/tomcat/alf_data/keystore']
|
||||
|
||||
def addSolrMtlsConfig(solrEnvNode):
|
||||
"""Add a list of environment values in Docker Compose SOLR Service for mTLS."""
|
||||
solrEnvNode['SOLR_SSL_TRUST_STORE'] = '/opt/alfresco-search-services/keystore/ssl-repo-client.truststore'
|
||||
solrEnvNode['SOLR_SSL_TRUST_STORE_TYPE'] = 'JCEKS'
|
||||
solrEnvNode['SOLR_SSL_KEY_STORE'] = '/opt/alfresco-search-services/keystore/ssl-repo-client.keystore'
|
||||
solrEnvNode['SOLR_SSL_KEY_STORE_TYPE'] = 'JCEKS'
|
||||
solrEnvNode['SOLR_SSL_NEED_CLIENT_AUTH'] = 'true'
|
||||
|
||||
def addSolrOpts(solrEnvNode):
|
||||
"""Add a list of values to add in Docker Compose SOLR_OPTS property for mTLS."""
|
||||
solrOptions = ' '.join(['-Dsolr.ssl.checkPeerName=false',
|
||||
'-Dsolr.allow.unsafe.resourceloading=true'])
|
||||
solrEnvNode['SOLR_OPTS'] = solrOptions
|
||||
|
||||
def addSolrJavaToolOptions(solrEnvNode):
|
||||
"""Add a list of values to add in Docker Compose JAVA_TOOL_OPTIONS property for mTLS."""
|
||||
solrOptions = ' '.join(['-Dsolr.jetty.truststore.password=kT9X6oe68t ',
|
||||
'-Dsolr.jetty.keystore.password=kT9X6oe68t ',
|
||||
'-Dssl-keystore.password=kT9X6oe68t',
|
||||
'-Dssl-keystore.aliases=ssl-alfresco-ca,ssl-repo-client',
|
||||
'-Dssl-keystore.ssl-alfresco-ca.password=kT9X6oe68t',
|
||||
'-Dssl-keystore.ssl-repo-client.password=kT9X6oe68t',
|
||||
'-Dssl-truststore.password=kT9X6oe68t',
|
||||
'-Dssl-truststore.aliases=ssl-alfresco-ca,ssl-repo,ssl-repo-client',
|
||||
'-Dssl-truststore.ssl-alfresco-ca.password=kT9X6oe68t',
|
||||
'-Dssl-truststore.ssl-repo.password=kT9X6oe68t',
|
||||
'-Dssl-truststore.ssl-repo-client.password=kT9X6oe68t'])
|
||||
solrEnvNode['JAVA_TOOL_OPTIONS'] = solrOptions
|
||||
|
||||
def addSolrVolumes(solrNode):
|
||||
"""Add route to keystores folder"""
|
||||
solrNode['volumes'] = ['./keystores/solr:/opt/alfresco-search-services/keystore']
|
||||
|
||||
def addSharedSecretSolrOpts(solrEnvNode):
|
||||
"""Add a list of values to add in Docker Compose SOLR_OPTS property for Shared Secret communication."""
|
||||
solrEnvNode['SOLR_OPTS'] = '-Dalfresco.secureComms.secret=secret'
|
||||
|
||||
def makeSearchNode(outputDirectory, nodeName, externalPort, params, communication, extraEnvironmentVars={}, solrcoreConfig=[], solrcoreReplacements={}):
|
||||
# Create a dictionary for the template replacement.
|
||||
allParams = dict(params)
|
||||
allParams['SOLR_HOST'] = nodeName
|
||||
allParams['ALFRESCO_PORT'] = 8443 if communication == 'mtls' else 8080
|
||||
allParams['EXTERNAL_PORT'] = externalPort
|
||||
# Properties to add to solrcore.properties.
|
||||
allParams['SOLRCORE_PROPERTIES'] = '\\n'.join(solrcoreConfig)
|
||||
# Replacements to make in solrcore.properties (in an "abc/xyz" format suitable for sed).
|
||||
allParams['SOLRCORE_REPLACEMENTS'] = ' '.join(map(lambda pair: '"{}/{}"'.format(*pair), solrcoreReplacements.items()))
|
||||
|
||||
# mTLS settings
|
||||
if communication == 'mtls':
|
||||
allParams['ALFRESCO_SECURE_COMMS'] = 'https'
|
||||
elif communication == 'none':
|
||||
allParams['ALFRESCO_SECURE_COMMS'] = 'none'
|
||||
else :
|
||||
allParams['ALFRESCO_SECURE_COMMS'] = 'secret'
|
||||
|
||||
allParams['TRUSTSTORE_TYPE'] = 'JCEKS'
|
||||
allParams['KEYSTORE_TYPE'] = 'JCEKS'
|
||||
|
||||
# Create a Dockerfile with any extra configuration in.
|
||||
with open(scriptDir + '/templates/search/Dockerfile.template') as f:
|
||||
dockerfileTemplate = f.read()
|
||||
dockerfileString = Template(dockerfileTemplate).substitute(allParams)
|
||||
if not os.path.isdir('{}/{}'.format(outputDirectory, nodeName)):
|
||||
os.mkdir('{}/{}'.format(outputDirectory, nodeName))
|
||||
with open('{}/{}/Dockerfile'.format(outputDirectory, nodeName), 'w') as f:
|
||||
dockerfileTemplate = f.write(dockerfileString)
|
||||
|
||||
# Load the search node template.
|
||||
with open(scriptDir + '/templates/search-node.yml.template') as f:
|
||||
searchNodeTemplate = f.read()
|
||||
searchNodeString = Template(searchNodeTemplate).substitute(allParams)
|
||||
# Read the result as yaml.
|
||||
searchNodeYaml = yaml.safe_load(searchNodeString)
|
||||
# Add any extra environment variables.
|
||||
searchNodeYaml['environment'].update(extraEnvironmentVars)
|
||||
|
||||
# Add mTLS configuration if required
|
||||
if communication == 'mtls':
|
||||
addSolrMtlsConfig(searchNodeYaml['environment'])
|
||||
addSolrOpts(searchNodeYaml['environment'])
|
||||
addSolrJavaToolOptions(searchNodeYaml['environment'])
|
||||
addSolrVolumes(searchNodeYaml)
|
||||
|
||||
# Add shared secret configuration if required
|
||||
if communication == 'secret':
|
||||
addSharedSecretSolrOpts(searchNodeYaml['environment'])
|
||||
|
||||
return searchNodeYaml
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Generate a docker-compose file for ACS.')
|
||||
parser.add_argument('-a', '--alfresco', default='quay.io/alfresco/dev:acs-for-search', help='The Alfresco image')
|
||||
parser.add_argument('-s', '--search', default='quay.io/alfresco/search-services:latest', help='The Search image')
|
||||
parser.add_argument('-e', '--share', help='The Share image (or omit for no UI)')
|
||||
parser.add_argument('-p', '--postgres', default='postgres:14.4', help='The Postgres image')
|
||||
parser.add_argument('-q', '--excludeAMQ', action='store_true', help='Exclude ActiveMQ (i.e. pre-ACS 6.1)')
|
||||
parser.add_argument('-t', '--transformer', choices=[LIBRE_OFFICE, AIO_TRANSFORMERS], help='Use external transformers. '
|
||||
+ '"{}" for legacy LibreOffice (i.e. ACS 5.2.x). '.format(LIBRE_OFFICE)
|
||||
+ '"{}" for the all-in-one transformers for use with ACS 6.2.x and later.'.format(AIO_TRANSFORMERS))
|
||||
parser.add_argument('-c', '--spellcheck', action='store_true', help='Spellcheck Enabled')
|
||||
parser.add_argument('-ms', '--masterslave', action='store_true', help='Master Slave Enabled')
|
||||
parser.add_argument('-sh', '--sharding', help='Sharding method (or omit for no sharding). Note that sharding is not supported on SearchServices 1.2.x or earlier.',
|
||||
choices=['DB_ID', 'DB_ID_RANGE', 'ACL_ID', 'MOD_ACL_ID', 'DATE', 'PROPERTY', 'LRIS', 'EXPLICIT_ID', 'EXPLICIT_ID_FALLBACK_LRIS'])
|
||||
parser.add_argument('-sc', '--shardCount', type=int, help='Total number of shards to create (default 2)')
|
||||
parser.add_argument('-sr', '--shardRange', type=int, help='Total number of nodes per shard with DB_ID_RANGE sharding (default 800)')
|
||||
parser.add_argument('-ct', '--disableCascadeTracking', action='store_true', help='Cascade Tracking Disabled')
|
||||
parser.add_argument('-ef', '--enableFingerprint', action='store_true', help='Enable Fingerprint feature')
|
||||
parser.add_argument('-ecl', '--enableCrossLocale', action='store_true', help='Enable Cross Locale configuration')
|
||||
parser.add_argument('-sl', '--searchLogLevel', default='WARN', help='The log level for search (default WARN)',
|
||||
choices=['TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR'])
|
||||
parser.add_argument('-o', '--output', default='.', help='The path of the directory to output to')
|
||||
parser.add_argument('-comm', '--communication', default='none', help='Use none, mtls or secret communication between SOLR and Alfresco Repository',
|
||||
choices=['none', 'mtls', 'secret'])
|
||||
args = parser.parse_args()
|
||||
|
||||
# If sharding is selected then the default number of shards is two.
|
||||
if args.sharding != None and args.shardCount == None:
|
||||
print('Using default shardCount of 2')
|
||||
args.shardCount = 2
|
||||
elif args.sharding == None and args.shardCount != None:
|
||||
print('ERROR: shardCount={} specified without sharding method'.format(args.shardCount))
|
||||
exit(1)
|
||||
print('Arguments:', args)
|
||||
|
||||
# If sharding is selected then the default number of nodes per shard is 800.
|
||||
if args.sharding == "DB_ID_RANGE" and (args.shardRange == None or args.shardRange < 1):
|
||||
print('Using default shardRange of 800')
|
||||
args.shardRange = 800
|
||||
elif args.sharding != 'DB_ID_RANGE' and args.shardRange != None:
|
||||
print('ERROR: shardRange={} is only supported for DB_ID_RANGE sharding.')
|
||||
exit(1)
|
||||
print('Arguments:', args)

# Load the template and perform basic token substitution.
scriptDir = os.path.dirname(os.path.realpath(__file__))
with open(scriptDir + '/templates/docker-compose.yml.template') as f:
    template = f.read()
params = {
    'ALFRESCO_IMAGE': args.alfresco,
    'SHARE_IMAGE': args.share,
    'POSTGRES_IMAGE': args.postgres,
    'SEARCH_IMAGE': args.search,
    'SEARCH_LOG_LEVEL': args.searchLogLevel,
    'ENABLE_CROSS_LOCALE': args.enableCrossLocale,
    'SEARCH_ENABLE_SPELLCHECK': str(args.spellcheck).lower(),
    'DISABLE_CASCADE_TRACKING': str(args.disableCascadeTracking).lower()
}
dcString = Template(template).substitute(params)

# Edit the resulting yaml.
dcYaml = yaml.safe_load(dcString)

# Insert the search node(s).
shardList = range(args.shardCount) if args.sharding != None else [0]
replicationTypes = ['master', 'slave'] if args.masterslave else ['standalone']
for shardId in shardList:
    for replicationType in replicationTypes:
        serviceName = 'search_{}_{}'.format(shardId, replicationType)
        # Workaround for ShardInfoTest.getShardInfoWithAdminAuthority.
        if shardId == 0 and replicationType == 'standalone':
            serviceName = 'search'
        externalPort = 8083 + 100 * shardId + (1 if replicationType == 'slave' else 0)
        dcYaml['services'][serviceName] = makeSearchNode(args.output, serviceName, externalPort, params, args.communication,
                extraEnvironmentVars=getExtraEnvironmentVars(serviceName, replicationType),
                solrcoreConfig=getSolrcoreConfig(args.sharding, shardId, args.shardCount, args.shardRange),
                solrcoreReplacements=getSolrcoreReplacements(args.sharding, args.communication, str(args.enableFingerprint).lower()))

# Point Alfresco at whichever Solr node came last in the list.
solrHost = serviceName
solrBaseUrl = '/solr-slave' if args.masterslave else '/solr'

javaOpts = getJavaOpts(not args.excludeAMQ, args.transformer == AIO_TRANSFORMERS, args.share != None, solrHost, solrBaseUrl, args.sharding, args.communication)
dcYaml['services']['alfresco']['environment']['JAVA_OPTS'] = javaOpts
javaToolOpts = getJavaToolOptions(args.communication)
dcYaml['services']['alfresco']['environment']['JAVA_TOOL_OPTIONS'] = javaToolOpts
if args.communication == 'mtls':
    addAlfrescoMtlsConfig(dcYaml['services']['alfresco']['build']['args'])
    addAlfrescoVolumes(dcYaml['services']['alfresco'])
elif args.communication == 'none':
    dcYaml['services']['alfresco']['build']['args']['SOLR_COMMS'] = 'none'

if not args.share:
    deleteServices(dcYaml, 'share', 'alfresco-pdf-renderer', 'imagemagick')
if args.excludeAMQ:
    deleteServices(dcYaml, 'activemq')
if args.transformer != AIO_TRANSFORMERS:
    deleteServices(dcYaml, 'transform-core-aio')
    del(dcYaml['volumes']['shared-file-store-volume'])
if args.transformer == LIBRE_OFFICE:
    dcYaml['services']['libreoffice'] = {'image': 'xcgd/libreoffice'}

# Output the yaml.
with open(args.output + '/docker-compose.yml', 'w') as f:
    f.write(yaml.safe_dump(dcYaml))

# Create an Alfresco Dockerfile with any extra configuration in.
with open(scriptDir + '/templates/alfresco/Dockerfile.template') as f:
    dockerfileTemplate = f.read()
dockerfileString = Template(dockerfileTemplate).substitute(params)
if not os.path.isdir('{}/{}'.format(args.output, 'alfresco')):
    os.mkdir('{}/{}'.format(args.output, 'alfresco'))
with open('{}/{}/Dockerfile'.format(args.output, 'alfresco'), 'w') as f:
    f.write(dockerfileString)

# Copy the keystores (when using mTLS)
if args.communication == 'mtls':
    copy_tree(scriptDir + '/keystores', args.output + '/keystores')
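For orientation, a hedged sketch of how this generator might be invoked; the script filename and the exact argparse flag spellings are assumptions, since only the argument attributes (`output`, `sharding`, `shardCount`, `shardRange`, ...) are visible in this hunk:

```shell
# Hypothetical invocation: write docker-compose.yml plus the Alfresco/Solr Dockerfiles into ./target,
# using DB_ID_RANGE sharding with two shards of 800 nodes each (flag names assumed, not confirmed here).
python3 generate.py --output target --sharding DB_ID_RANGE --shardCount 2 --shardRange 800

# Then bring the generated stack up; build contexts are resolved relative to the compose file.
docker-compose -f target/docker-compose.yml up -d --build
```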
BIN
e2e-test/python-generator/keystores/alfresco/keystore
Normal file
Binary file not shown.
BIN
e2e-test/python-generator/keystores/alfresco/ssl.keystore
Normal file
Binary file not shown.
BIN
e2e-test/python-generator/keystores/alfresco/ssl.truststore
Normal file
Binary file not shown.
55
e2e-test/python-generator/templates/alfresco/Dockerfile.template
Executable file
@@ -0,0 +1,55 @@
|
||||
FROM ${ALFRESCO_IMAGE}
|
||||
|
||||
ENV TOMCAT_DIR=/usr/local/tomcat
|
||||
ENV ALF_DATA_DIR=$${TOMCAT_DIR}/alf_data
|
||||
|
||||
# COMMS
|
||||
ARG SOLR_COMMS
|
||||
ENV SOLR_COMMS $$SOLR_COMMS
|
||||
|
||||
# SSL
|
||||
ARG TRUSTSTORE_TYPE
|
||||
ARG TRUSTSTORE_PASS
|
||||
ARG KEYSTORE_TYPE
|
||||
ARG KEYSTORE_PASS
|
||||
|
||||
ENV TRUSTSTORE_TYPE=$$TRUSTSTORE_TYPE \
|
||||
TRUSTSTORE_PASS=$$TRUSTSTORE_PASS \
|
||||
KEYSTORE_TYPE=$$KEYSTORE_TYPE \
|
||||
KEYSTORE_PASS=$$KEYSTORE_PASS
|
||||
|
||||
USER root
|
||||
|
||||
# Default value in 'repository.properties' is 'dir.keystore=classpath:alfresco/keystore'
|
||||
RUN if [ "$$SOLR_COMMS" == "https" ] ; then \
|
||||
echo -e "\n\
|
||||
dir.keystore=$${ALF_DATA_DIR}/keystore\n\
|
||||
alfresco.encryption.ssl.keystore.type=$${KEYSTORE_TYPE}\n\
|
||||
alfresco.encryption.ssl.truststore.type=$${TRUSTSTORE_TYPE}\n\
|
||||
" >> $${TOMCAT_DIR}/shared/classes/alfresco-global.properties; \
|
||||
fi
|
||||
|
||||
# Enable SSL by adding the proper Connector to server.xml
|
||||
RUN if [ "$$SOLR_COMMS" == "https" ] ; then \
|
||||
sed -i "s/\
|
||||
[[:space:]]\+<\/Engine>/\n\
|
||||
<\/Engine>\n\
|
||||
<Connector port=\"8443\" protocol=\"org.apache.coyote.http11.Http11Protocol\"\n\
|
||||
connectionTimeout=\"20000\"\n\
|
||||
SSLEnabled=\"true\" maxThreads=\"150\" scheme=\"https\"\n\
|
||||
keystoreFile=\"\/usr\/local\/tomcat\/alf_data\/keystore\/ssl.keystore\"\n\
|
||||
keystorePass=\"$${KEYSTORE_PASS}\" keystoreType=\"$${KEYSTORE_TYPE}\" secure=\"true\"\n\
|
||||
truststoreFile=\"\/usr\/local\/tomcat\/alf_data\/keystore\/ssl.truststore\"\n\
|
||||
truststorePass=\"$${TRUSTSTORE_PASS}\" truststoreType=\"$${TRUSTSTORE_TYPE}\" clientAuth=\"want\" sslProtocol=\"TLS\">\n\
|
||||
<\/Connector>/g" $${TOMCAT_DIR}/conf/server.xml; \
|
||||
elif [ "$$SOLR_COMMS" == "none" ] ; then \
|
||||
sed -i "s/<filter-class>org.alfresco.web.app.servlet.AlfrescoX509ServletFilter<\/filter-class>/&\n\
|
||||
<init-param>\n\
|
||||
<param-name>allow-unauthenticated-solr-endpoint<\/param-name>\n\
|
||||
<param-value>true<\/param-value>\n\
|
||||
<\/init-param>/" $${TOMCAT_DIR}/webapps/alfresco/WEB-INF/web.xml; \
|
||||
fi
|
||||
|
||||
# Expose keystore folder
|
||||
# Useless for 'none'/'http' communications with SOLR
|
||||
VOLUME ["$$ALF_DATA_DIR/keystore"]
|
@@ -0,0 +1,52 @@
|
||||
version: '3'
|
||||
services:
|
||||
alfresco:
|
||||
build:
|
||||
context: ./alfresco
|
||||
args:
|
||||
SOLR_COMMS: none
|
||||
environment:
|
||||
CATALINA_OPTS : "-agentlib:jdwp=transport=dt_socket,address=*:8000,server=y,suspend=n"
|
||||
JAVA_OPTS : "This will be populated by the generator script"
|
||||
JAVA_TOOL_OPTIONS : "This will be populated by the generator script"
|
||||
ports:
|
||||
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
|
||||
- "8000:8000" #Java debugging
|
||||
- "8081:8080" #Browser port for Alfresco
|
||||
share:
|
||||
image: ${SHARE_IMAGE}
|
||||
environment:
|
||||
- REPO_HOST=alfresco
|
||||
- REPO_PORT=8080
|
||||
ports:
|
||||
- 8082:8080 #Browser port for Share
|
||||
postgres:
|
||||
image: ${POSTGRES_IMAGE}
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=alfresco
|
||||
- POSTGRES_USER=alfresco
|
||||
- POSTGRES_DB=alfresco
|
||||
ports:
|
||||
- 5432:5432
|
||||
activemq:
|
||||
image: alfresco/alfresco-activemq:5.17.1-jre11-rockylinux8
|
||||
ports:
|
||||
- 8161:8161 # Web Console
|
||||
- 5672:5672 # AMQP
|
||||
- 61616:61616 # OpenWire
|
||||
- 61613:61613 # STOMP
|
||||
transform-core-aio:
|
||||
image: alfresco/alfresco-transform-core-aio:2.3.5
|
||||
environment:
|
||||
JAVA_OPTS: " -Xms256m -Xmx512m"
|
||||
ACTIVEMQ_URL: "nio://activemq:61616"
|
||||
ACTIVEMQ_USER: "admin"
|
||||
ACTIVEMQ_PASSWORD: "admin"
|
||||
FILE_STORE_URL: "http://shared-file-store:8099/alfresco/api/-default-/private/sfs/versions/1/file"
|
||||
ports:
|
||||
- 8090:8090
|
||||
volumes:
|
||||
shared-file-store-volume:
|
||||
driver_opts:
|
||||
type: tmpfs
|
||||
device: tmpfs
|
17
e2e-test/python-generator/templates/search-node.yml.template
Normal file
@@ -0,0 +1,17 @@
|
||||
build:
|
||||
context: ./${SOLR_HOST}
|
||||
environment:
|
||||
#Solr needs to know how to register itself with Alfresco
|
||||
SOLR_ALFRESCO_HOST: "alfresco"
|
||||
SOLR_ALFRESCO_PORT: "${ALFRESCO_PORT}"
|
||||
#Alfresco needs to know how to call solr
|
||||
SOLR_SOLR_HOST: "${SOLR_HOST}"
|
||||
SOLR_SOLR_PORT: "8983"
|
||||
#Create the default alfresco and archive cores
|
||||
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
|
||||
#Enable Spellcheck by setting to true
|
||||
ENABLE_SPELLCHECK: "${SEARCH_ENABLE_SPELLCHECK}"
|
||||
#Disable Cascade Tracking
|
||||
DISABLE_CASCADE_TRACKING: "${DISABLE_CASCADE_TRACKING}"
|
||||
ports:
|
||||
- ${EXTERNAL_PORT}:8983 #Browser port
|
@@ -0,0 +1,39 @@
|
||||
FROM ${SEARCH_IMAGE}
|
||||
|
||||
# Create search_config_setup.sh if it does not exist (e.g. on SearchServices 1.2.x or earlier).
|
||||
USER root
|
||||
RUN touch $${DIST_DIR}/solr/bin/search_config_setup.sh \
|
||||
&& chown solr:solr $${DIST_DIR}/solr/bin/search_config_setup.sh
|
||||
USER solr
|
||||
|
||||
RUN replacementPairs=(${SOLRCORE_REPLACEMENTS}); \
|
||||
for replacementPair in $${replacementPairs[@]}; \
|
||||
do \
|
||||
sed -i '/^bash.*/i sed -i "'"s/$$replacementPair/g"'" $${DIST_DIR}/solrhome/templates/rerank/conf/solrcore.properties\n' \
|
||||
$${DIST_DIR}/solr/bin/search_config_setup.sh; \
|
||||
done; \
|
||||
if [[ "${SOLRCORE_PROPERTIES}" != "" ]]; \
|
||||
then \
|
||||
sed -i '/^bash.*/i echo "\n${SOLRCORE_PROPERTIES}" >> $${DIST_DIR}/solrhome/templates/rerank/conf/solrcore.properties\n' \
|
||||
$${DIST_DIR}/solr/bin/search_config_setup.sh; \
|
||||
fi
|
||||
|
||||
USER root
|
||||
RUN mkdir -p /opt/alfresco-search-services/keystore \
|
||||
&& chown -R solr:solr /opt/alfresco-search-services/keystore
|
||||
USER solr
|
||||
|
||||
# Set the search log level if requested.
|
||||
RUN if [ "${SEARCH_LOG_LEVEL}" != "" ] ; then \
|
||||
sed -i '/^bash.*/i sed -i "'"s/log4j.rootLogger=WARN, file, CONSOLE/log4j.rootLogger=${SEARCH_LOG_LEVEL}, file, CONSOLE/g"'" $${DIST_DIR}/logs/log4j.properties\n' \
|
||||
$${DIST_DIR}/solr/bin/search_config_setup.sh; \
|
||||
fi
|
||||
|
||||
# Enable cross locale configuration if requested.
|
||||
RUN if [[ "${ENABLE_CROSS_LOCALE}" == "True" ]] ; then \
|
||||
sed -i '/^bash.*/i sed -i "'"/alfresco.cross.locale.datatype/s/^#//g"'" $${DIST_DIR}/solrhome/conf/shared.properties\n' \
|
||||
$${DIST_DIR}/solr/bin/search_config_setup.sh; \
|
||||
fi
|
||||
|
||||
|
||||
VOLUME ["/opt/alfresco-search-services/keystore"]
|
@@ -1,11 +0,0 @@
|
||||
# docker-compose related environments
|
||||
ALFRESCO_IMAGE=quay.io/alfresco/alfresco-governance-repository-enterprise
|
||||
ALFRESCO_TAG=latest
|
||||
SHARE_IMAGE=quay.io/alfresco/alfresco-governance-share-enterprise
|
||||
SHARE_TAG=latest
|
||||
POSTGRES_IMAGE=postgres
|
||||
POSTGRES_TAG=10.1
|
||||
SEARCH_IMAGE=quay.io/alfresco/insight-engine
|
||||
SEARCH_TAG=latest
|
||||
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
|
||||
ACTIVEMQ_TAG=5.15.6
|
@@ -1,9 +0,0 @@
|
||||
include ../Makefile
|
||||
include .env
|
||||
|
||||
# CURRENT_DIR is the folder where this Makefile is saved
|
||||
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
||||
|
||||
start: ## 0 - starts search service with SSL enabled
|
||||
$(dc) config && $(dc) up -d && \
|
||||
make wait
|
@@ -1,65 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
alfresco:
|
||||
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
|
||||
environment:
|
||||
JAVA_OPTS : "
|
||||
-Ddb.driver=org.postgresql.Driver
|
||||
-Ddb.username=alfresco
|
||||
-Ddb.password=alfresco
|
||||
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
|
||||
-Dsolr.host=search
|
||||
-Dsolr.port=8983
|
||||
-Dsolr.secureComms=none
|
||||
-Dsolr.base.url=/solr
|
||||
-Dindex.subsystem.name=solr6
|
||||
-Dalfresco.restApi.basicAuthScheme=true
|
||||
-Ddeployment.method=DOCKER_COMPOSE
|
||||
-Dcsrf.filter.enabled=false
|
||||
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
|
||||
-Xms1g -Xmx1g
|
||||
"
|
||||
ports:
|
||||
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
|
||||
- "5005:5005" #Java debugging
|
||||
- "8081:8080" #Browser port for Alfresco
|
||||
|
||||
share:
|
||||
image: ${SHARE_IMAGE}:${SHARE_TAG}
|
||||
environment:
|
||||
- REPO_HOST=alfresco
|
||||
- REPO_PORT=8080
|
||||
- "CATALINA_OPTS= -Xms500m -Xmx500m"
|
||||
ports:
|
||||
- 8082:8080 #Browser port for Share
|
||||
|
||||
postgres:
|
||||
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=alfresco
|
||||
- POSTGRES_USER=alfresco
|
||||
- POSTGRES_DB=alfresco
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
search:
|
||||
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
|
||||
environment:
|
||||
#Solr needs to know how to register itself with Alfresco
|
||||
- SOLR_ALFRESCO_HOST=alfresco
|
||||
- SOLR_ALFRESCO_PORT=8080
|
||||
#Alfresco needs to know how to call solr
|
||||
- SOLR_SOLR_HOST=search
|
||||
- SOLR_SOLR_PORT=8983
|
||||
#Create the default alfresco and archive cores
|
||||
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
|
||||
ports:
|
||||
- 8083:8983 #Browser port
|
||||
|
||||
activemq:
|
||||
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
|
||||
ports:
|
||||
- 8161:8161 # Web Console
|
||||
- 5672:5672 # AMQP
|
||||
- 61616:61616 # OpenWire
|
||||
- 61613:61613 # STOMP
|
@@ -1,11 +0,0 @@
|
||||
# docker-compose related environments
|
||||
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
|
||||
ALFRESCO_TAG=6.1.0-EA3
|
||||
SHARE_IMAGE=alfresco/alfresco-share
|
||||
SHARE_TAG=6.0
|
||||
POSTGRES_IMAGE=postgres
|
||||
POSTGRES_TAG=10.1
|
||||
SEARCH_IMAGE=quay.io/alfresco/search-services
|
||||
SEARCH_TAG=latest
|
||||
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
|
||||
ACTIVEMQ_TAG=5.15.6
|
@@ -1,68 +0,0 @@
|
||||
include ../../Makefile
|
||||
include .env
|
||||
|
||||
# the suffix of the backup taken at a point in time. It can be overridden at runtime: make SUFIX=T1 backup-perform
|
||||
SUFIX ?=T0
|
||||
# CURRENT_DIR is the folder where this Makefile is saved
|
||||
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
||||
|
||||
# this is used also in compose yml files
|
||||
export HOST_BACKUP_LOCATION:=$(CURRENT_DIR)/host-bkp
|
||||
|
||||
ifeq ($(dc-backup),)
|
||||
dc-backup:=$(dc) -f ../docker-compose.yml -f docker-compose.backup.yml
|
||||
endif
|
||||
|
||||
backup-prepare: clean ## 1 - prepare backup for testing
|
||||
@echo "Starting Backup Prepare" && \
|
||||
$(sudo) rm -rf $(HOST_BACKUP_LOCATION) && \
|
||||
mkdir -p $(HOST_BACKUP_LOCATION)/alf_data && \
|
||||
mkdir -p $(HOST_BACKUP_LOCATION)/solr/archive && \
|
||||
mkdir -p $(HOST_BACKUP_LOCATION)/solr/alfresco && \
|
||||
mkdir -p $(HOST_BACKUP_LOCATION)/db && \
|
||||
$(sudo) chmod -R 777 $(HOST_BACKUP_LOCATION) && \
|
||||
$(dc-backup) up -d
|
||||
|
||||
backup-perform: ## 2 - perform the backup of alf_data and db data
|
||||
@echo "Starting Backup Perform" && \
|
||||
$(sudo) rm -rf $(HOST_BACKUP_LOCATION)_$(SUFIX) && \
|
||||
$(sudo) chmod -R 777 $(HOST_BACKUP_LOCATION) && \
|
||||
$(dc-backup) stop alfresco && \
|
||||
$(dc-backup) exec postgres bash -c 'pg_dump --dbname=postgresql://alfresco:alfresco@127.0.0.1:5432/alfresco' > $(HOST_BACKUP_LOCATION)/db/alfresco.pg && \
|
||||
cp -R $(HOST_BACKUP_LOCATION) $(HOST_BACKUP_LOCATION)_$(SUFIX) && \
|
||||
$(dc-backup) start alfresco
|
||||
|
||||
backup-restore: clean ## 3 - start restoring from backup location
|
||||
@echo "Starting Backup Restore" && \
|
||||
$(sudo) rm -rf $(HOST_BACKUP_LOCATION) && \
|
||||
mkdir -p $(HOST_BACKUP_LOCATION) && \
|
||||
cp -rf $(HOST_BACKUP_LOCATION)_$(SUFIX)/alf_data $(HOST_BACKUP_LOCATION)/alf_data && \
|
||||
cp -rf $(HOST_BACKUP_LOCATION)_$(SUFIX)/db/ $(HOST_BACKUP_LOCATION)/db/ && \
|
||||
cp -rf $(HOST_BACKUP_LOCATION)_$(SUFIX)/solr $(HOST_BACKUP_LOCATION)/solr && \
|
||||
$(sudo) chmod -R 777 $(HOST_BACKUP_LOCATION) && \
|
||||
$(dc-backup) up -d postgres && sleep 30 && \
|
||||
$(dc-backup) exec postgres bash -c 'psql --dbname=postgresql://alfresco:alfresco@127.0.0.1:5432/alfresco < /backup/db/alfresco.pg' && \
|
||||
$(dc-backup) up -d
|
||||
|
||||
all: show-config ## 0 - executes the entire backup process
|
||||
# perform the backup and waits until the server is starting
|
||||
# do some change on backed up data
|
||||
# then restore from backup and check the content is restored as expected
|
||||
make backup-prepare wait && \
|
||||
make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-backup-suite.xml
|
||||
|
||||
make backup-perform wait && \
|
||||
make run-mvn-tests suiteXmlFile=./src/test/resources/search-on-backup-suite.xml
|
||||
|
||||
make backup-restore wait && \
|
||||
make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-backup-suite.xml
|
||||
|
||||
show-config: ## show compose configuration
|
||||
$(dc-backup) config
|
||||
|
||||
clean: ## kill containers, remove volumes and data
|
||||
$(dc-backup) kill && $(dc-backup) rm -fv
|
||||
$(sudo) rm -rf $(HOST_BACKUP_LOCATION)
|
||||
|
||||
tail-logs: ## tails all container logs
|
||||
$(dc-backup) logs -f
|
@@ -1,49 +0,0 @@
|
||||
# About

Testing the Backup of the SearchServices product

**Build Plan:** https://bamboo.alfresco.com/bamboo/browse/SAD-QAB



# Steps

* **a)** prepare the backup
```shell
make backup-prepare wait
```
>more details on the Makefile [task](Makefile#L27).

* **b)** create some data manually or using the automated tests found in this project
```shell
make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-backup-suite.xml
```

* **c)** perform the backup of the data
```shell
make backup-perform wait
```
* **d)** now you can also update the data, remove it from TS, or even remove the entire volumes
```shell
make run-mvn-tests suiteXmlFile=./src/test/resources/search-on-backup-suite.xml
# or
make clean
```
* **e)** at any time you can restore the backup
```shell
make backup-restore wait
```
* **f)** now you can check that the data from point **b)** is correctly recovered
```shell
make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-backup-suite.xml
```

# All in one
At any time you can run the `make all` task, which executes all of the above commands for you

```shell
make all
```

# Environment Settings
Pay attention to the values in the [.env](.env) file. These settings will be picked up in the custom docker-compose.*.yml file(s)
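As a hedged illustration of how those values flow into the compose files: they can be edited in `.env`, or exported in the shell before calling a target, which is the same pattern the upgrade Makefile in this diff uses. The tag below is an arbitrary example, not a default from this repo:

```shell
# run the backup scenario against a specific search image/tag (example values)
export SEARCH_IMAGE=quay.io/alfresco/search-services
export SEARCH_TAG=2.0.2
make backup-prepare wait
```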
@@ -1,39 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
alfresco:
|
||||
environment:
|
||||
JAVA_OPTS : "
|
||||
-Ddb.driver=org.postgresql.Driver
|
||||
-Ddb.username=alfresco
|
||||
-Ddb.password=alfresco
|
||||
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
|
||||
-Dsolr.host=search
|
||||
-Dsolr.port=8983
|
||||
-Dsolr.secureComms=none
|
||||
-Dsolr.base.url=/solr
|
||||
-Dindex.subsystem.name=solr6
|
||||
-Dalfresco.restApi.basicAuthScheme=true
|
||||
-Ddeployment.method=DOCKER_COMPOSE
|
||||
-Dcsrf.filter.enabled=false
|
||||
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
|
||||
-Dsolr.backup.alfresco.remoteBackupLocation=/backup/solr/alfresco/
|
||||
-Dsolr.backup.alfresco.numberToKeep=1
|
||||
-Dsolr.backup.archive.remoteBackupLocation=/backup/solr/archive/
|
||||
-Dsolr.backup.archive.numberToKeep=1"
|
||||
volumes:
|
||||
- ${HOST_BACKUP_LOCATION}/alf_data:/usr/local/tomcat/alf_data
|
||||
|
||||
search:
|
||||
environment:
|
||||
- VERSION=${SEARCH_TAG}
|
||||
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
|
||||
volumes:
|
||||
- ${HOST_BACKUP_LOCATION}/solr:/backup/solr
|
||||
|
||||
postgres:
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=alfresco
|
||||
- POSTGRES_USER=alfresco
|
||||
- POSTGRES_DB=alfresco
|
||||
volumes:
|
||||
- ${HOST_BACKUP_LOCATION}/db:/backup/db
|
Binary file not shown.
@@ -1,11 +0,0 @@
|
||||
# docker-compose related environments
|
||||
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
|
||||
ALFRESCO_TAG=6.1.0-EA3
|
||||
SHARE_IMAGE=alfresco/alfresco-share
|
||||
SHARE_TAG=6.0
|
||||
POSTGRES_IMAGE=postgres
|
||||
POSTGRES_TAG=10.1
|
||||
SEARCH_IMAGE=quay.io/alfresco/search-services
|
||||
SEARCH_TAG=latest
|
||||
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
|
||||
ACTIVEMQ_TAG=5.15.6
|
@@ -1,16 +0,0 @@
|
||||
ARG SEARCH_TAG=latest
|
||||
FROM quay.io/alfresco/search-services:$SEARCH_TAG
|
||||
LABEL creator="Paul Brodner" maintainer="Alfresco Search Services Team"
|
||||
|
||||
ARG SCRIPTS_FOLDER=
|
||||
|
||||
USER root
|
||||
RUN echo " &" >> $DIST_DIR/solr/bin/search_config_setup.sh && \
|
||||
echo "bash -c \"find $DIST_DIR/scripts/ -maxdepth 1 -type f -executable -name '*.sh' -exec {} \\;\"" >> $DIST_DIR/solr/bin/search_config_setup.sh && \
|
||||
echo "bash -c \"tail -f $DIST_DIR/logs/solr.log\"" >> $DIST_DIR/solr/bin/search_config_setup.sh
|
||||
|
||||
USER solr
|
||||
COPY ${SCRIPTS_FOLDER}/* ${DIST_DIR}/scripts/
|
||||
|
||||
# we need this because we tail it in search_config_setup.sh (see above)
|
||||
RUN touch ./logs/solr.log
|
@@ -1,21 +0,0 @@
|
||||
include ../../Makefile
|
||||
include .env
|
||||
|
||||
# CURRENT_DIR is the folder where this Makefile is saved
|
||||
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
||||
|
||||
ifeq ($(dc-custom),)
|
||||
dc-custom:=$(dc) -f ../docker-compose.yml -f docker-compose.custom.yml
|
||||
endif
|
||||
|
||||
|
||||
## ---- CUSTOM
|
||||
build: ## 1 - build a custom image: $ make SCRIPTS_FOLDER=spellcheck build
|
||||
ifndef SCRIPTS_FOLDER
|
||||
@echo SCRIPTS_FOLDER not defined "Usage: make SCRIPTS_FOLDER=spellcheck build"
|
||||
exit 1
|
||||
endif
|
||||
$(dc-custom) build --force-rm --no-cache --pull --build-arg SCRIPTS_FOLDER=$(SCRIPTS_FOLDER)
|
||||
|
||||
start: ## 2 - starts the custom image built: $ make start
|
||||
$(dc-custom) up -d && make wait
|
@@ -1,25 +0,0 @@
|
||||
# About

Start Search Services with a custom configuration

# Steps

* **a)** under the `custom` folder, create a new folder that will hold all the settings
>check out the [spellcheck](.spellcheck) folder for an example

>add here any shell scripts that will enable/disable a particular setting (a minimal sketch follows after these steps)

* **b)** build the new image, setting SCRIPTS_FOLDER to the folder you just created
```shell
make SCRIPTS_FOLDER=spellcheck build
```
>notice that our [docker-compose.custom.yml](.custom/docker-compose.custom.yml) file uses a [Dockerfile](.custom/Dockerfile) to build your new image.
> at runtime, all shell scripts from your folder are executed and the settings are applied.

* **c)** the image is built locally, now start it up
```shell
make start
```

# Environment Settings
Pay attention to the values in the [.env](.env) file. These settings will be picked up in the custom docker-compose.*.yml file(s)
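A minimal sketch of such a settings script, following the shape of the spellcheck example deleted later in this diff; the property appended here is only an illustration of the pattern (the cross-locale datatype referenced elsewhere in this changeset), not something this repo ships:

```shell
#!/usr/bin/env bash
set -ex

echo "Enabling cross locale text datatype (illustrative setting)"
cat <<EOF >> /opt/alfresco-search-services/solrhome/conf/shared.properties

# Appended by this custom settings script
alfresco.cross.locale.datatype.0={http://www.alfresco.org/model/dictionary/1.0}text
EOF
```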
@@ -1,9 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
search:
|
||||
build:
|
||||
context: ./custom
|
||||
dockerfile: Dockerfile
|
||||
image: quay.io/alfresco/search-services-custom:${SEARCH_TAG}
|
||||
volumes:
|
||||
- .:/backup
|
@@ -1,19 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -ex
|
||||
|
||||
echo "Enabling SpellCheck"
|
||||
cat <<EOF >> /opt/alfresco-search-services/solrhome/conf/shared.properties
|
||||
|
||||
# Enabling SpellCheck
|
||||
# configuration:
|
||||
# * http://docs.alfresco.com/6.0/concepts/solr-shared-properties.html
|
||||
# * https://docs.alfresco.com/5.2/tasks/solr6-install-withoutSSL.html
|
||||
# test it: http://docs.alfresco.com/6.0/concepts/search-api-spellcheck.html
|
||||
|
||||
# Suggestable Properties
|
||||
alfresco.suggestable.property.0={http://www.alfresco.org/model/content/1.0}name
|
||||
alfresco.suggestable.property.1={http://www.alfresco.org/model/content/1.0}title
|
||||
alfresco.suggestable.property.2={http://www.alfresco.org/model/content/1.0}description
|
||||
alfresco.suggestable.property.3={http://www.alfresco.org/model/content/1.0}content
|
||||
|
||||
EOF
|
@@ -1,63 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
alfresco:
|
||||
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
|
||||
environment:
|
||||
JAVA_OPTS : "
|
||||
-Ddb.driver=org.postgresql.Driver
|
||||
-Ddb.username=alfresco
|
||||
-Ddb.password=alfresco
|
||||
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
|
||||
-Dsolr.host=search
|
||||
-Dsolr.port=8983
|
||||
-Dsolr.secureComms=none
|
||||
-Dsolr.base.url=/solr
|
||||
-Dindex.subsystem.name=solr6
|
||||
-Dalfresco.restApi.basicAuthScheme=true
|
||||
-Ddeployment.method=DOCKER_COMPOSE
|
||||
-Dcsrf.filter.enabled=false
|
||||
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
|
||||
"
|
||||
ports:
|
||||
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
|
||||
- "5005:5005" #Java debugging
|
||||
- "8081:8080" #Browser port for Alfresco
|
||||
|
||||
share:
|
||||
image: ${SHARE_IMAGE}:${SHARE_TAG}
|
||||
environment:
|
||||
- REPO_HOST=alfresco
|
||||
- REPO_PORT=8080
|
||||
ports:
|
||||
- 8082:8080 #Browser port for Share
|
||||
|
||||
postgres:
|
||||
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=alfresco
|
||||
- POSTGRES_USER=alfresco
|
||||
- POSTGRES_DB=alfresco
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
search:
|
||||
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
|
||||
environment:
|
||||
#Solr needs to know how to register itself with Alfresco
|
||||
- SOLR_ALFRESCO_HOST=alfresco
|
||||
- SOLR_ALFRESCO_PORT=8080
|
||||
#Alfresco needs to know how to call solr
|
||||
- SOLR_SOLR_HOST=search
|
||||
- SOLR_SOLR_PORT=8983
|
||||
#Create the default alfresco and archive cores
|
||||
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
|
||||
ports:
|
||||
- 8083:8983 #Browser port
|
||||
|
||||
activemq:
|
||||
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
|
||||
ports:
|
||||
- 8161:8161 # Web Console
|
||||
- 5672:5672 # AMQP
|
||||
- 61616:61616 # OpenWire
|
||||
- 61613:61613 # STOMP
|
@@ -1,11 +0,0 @@
|
||||
# docker-compose related environments
|
||||
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
|
||||
ALFRESCO_TAG=6.1.0-EA3
|
||||
SHARE_IMAGE=alfresco/alfresco-share
|
||||
SHARE_TAG=6.0
|
||||
POSTGRES_IMAGE=postgres
|
||||
POSTGRES_TAG=10.1
|
||||
SEARCH_IMAGE=quay.io/alfresco/search-services
|
||||
SEARCH_TAG=latest
|
||||
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
|
||||
ACTIVEMQ_TAG=5.15.6
|
@@ -1,22 +0,0 @@
|
||||
# About

Start the Alfresco services and scale SOLR to multiple instances behind a load balancer (LB).

# Steps

* **a)** Start Alfresco

```
docker-compose up -d
```

* **b)** Scale SOLR to 2 instances

```
docker-compose scale solr=2
```

>if the LB returns no results at this point, it may help to restart the `alfresco` service
```
docker-compose restart alfresco
```
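To sanity-check the scaled setup, a hedged sketch (service names taken from the compose file above; the haproxy front-end is published on port 8083):

```shell
# list the haproxy front-end and the scaled solr containers
docker-compose ps search solr

# the load-balanced Solr endpoint should answer through the published port
curl -I http://localhost:8083/solr/
```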
@@ -1,72 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
alfresco:
|
||||
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
|
||||
environment:
|
||||
JAVA_OPTS : "
|
||||
-Ddb.driver=org.postgresql.Driver
|
||||
-Ddb.username=alfresco
|
||||
-Ddb.password=alfresco
|
||||
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
|
||||
-Dsolr.host=search
|
||||
-Dsolr.port=80
|
||||
-Dsolr.secureComms=none
|
||||
-Dsolr.base.url=/solr
|
||||
-Dindex.subsystem.name=solr6
|
||||
-Dalfresco.restApi.basicAuthScheme=true
|
||||
-Ddeployment.method=DOCKER_COMPOSE
|
||||
-Dcsrf.filter.enabled=false
|
||||
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
|
||||
"
|
||||
ports:
|
||||
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
|
||||
- "5005:5005" #Java debugging
|
||||
- "8081:8080" #Browser port for Alfresco
|
||||
|
||||
share:
|
||||
image: ${SHARE_IMAGE}:${SHARE_TAG}
|
||||
environment:
|
||||
- REPO_HOST=alfresco
|
||||
- REPO_PORT=8080
|
||||
ports:
|
||||
- 8082:8080 #Browser port for Share
|
||||
|
||||
postgres:
|
||||
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=alfresco
|
||||
- POSTGRES_USER=alfresco
|
||||
- POSTGRES_DB=alfresco
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
search:
|
||||
image: dockercloud/haproxy
|
||||
links:
|
||||
- solr
|
||||
ports:
|
||||
- 8083:80 #Browser port
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
|
||||
solr:
|
||||
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
|
||||
environment:
|
||||
#Solr needs to know how to register itself with Alfresco
|
||||
- SOLR_ALFRESCO_HOST=alfresco
|
||||
- SOLR_ALFRESCO_PORT=8080
|
||||
#Alfresco needs to know how to call solr
|
||||
- SOLR_SOLR_HOST=search
|
||||
- SOLR_SOLR_PORT=8983
|
||||
#Create the default alfresco and archive cores
|
||||
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
|
||||
ports:
|
||||
- 8983 #Browser port
|
||||
|
||||
activemq:
|
||||
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
|
||||
ports:
|
||||
- 8161:8161 # Web Console
|
||||
- 5672:5672 # AMQP
|
||||
- 61616:61616 # OpenWire
|
||||
- 61613:61613 # STOMP
|
@@ -1,11 +0,0 @@
|
||||
# docker-compose related environments
|
||||
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
|
||||
ALFRESCO_TAG=6.1.0-EA3
|
||||
SHARE_IMAGE=alfresco/alfresco-share
|
||||
SHARE_TAG=6.0
|
||||
POSTGRES_IMAGE=postgres
|
||||
POSTGRES_TAG=10.1
|
||||
SEARCH_IMAGE=quay.io/alfresco/search-services
|
||||
SEARCH_TAG=latest
|
||||
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
|
||||
ACTIVEMQ_TAG=5.15.6
|
@@ -1,41 +0,0 @@
|
||||
include ../../Makefile
|
||||
include .env
|
||||
|
||||
# CURRENT_DIR is the folder where this Makefile is saved
|
||||
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
||||
|
||||
ifeq ($(dc-upgrade),)
|
||||
dc-upgrade:=$(dc) -f ../docker-compose.yml -f docker-compose.upgrade.yml
|
||||
endif
|
||||
|
||||
## ---- UPGRADE
|
||||
set_version ?=latest # default version that will be used in tasks
|
||||
|
||||
as-previous: clean ## 1 - install the previous version: $ make set_version=1.2.1 as-previous
|
||||
rm -rf ./solr-data && \
|
||||
rm -rf ./solr-contentstore && \
|
||||
rm -f ./image-digests.txt
|
||||
export SEARCH_TAG=$(set_version) && \
|
||||
$(dc-upgrade) pull && \
|
||||
echo "\n====Previous====" > image-digests.txt && \
|
||||
$(dc-upgrade) config --resolve-image-digests >> image-digests.txt && \
|
||||
$(dc-upgrade) up -d && \
|
||||
docker ps
|
||||
|
||||
as-current: ## 2 - upgrade previous to this version $ make set_version=2.0.x as-current
|
||||
$(dc-upgrade) kill search && \
|
||||
$(dc-upgrade) rm -f search && \
|
||||
export SEARCH_TAG=$(set_version) && \
|
||||
$(dc-upgrade) pull search && \
|
||||
echo "\n====Current====" >> image-digests.txt && \
|
||||
$(dc-upgrade) config --resolve-image-digests >> image-digests.txt && \
|
||||
$(dc-upgrade) up -d search && \
|
||||
docker ps
|
||||
|
||||
#
|
||||
# Run the following commands if you need to test the upgrade e2e
|
||||
#
|
||||
#make set_version=1.2.1 as-previous wait
|
||||
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-upgrade-suite.xml
|
||||
#make set_version=2.0.x as-current wait
|
||||
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-upgrade-suite.xml
|
@@ -1,31 +0,0 @@
|
||||
# About

Testing the Upgrade of the SearchServices product

**Build Plan:** https://bamboo.alfresco.com/bamboo/browse/SAD-QAUP



# Steps

* **a)** start the initial version
```shell
make set_version=1.2.1 as-previous wait
```
>notice that new folders will appear in your "upgrade" folder with data from the container(s)

* **b)** create some data manually or using the automated tests found in this project
```shell
make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-upgrade-suite.xml
```
* **c)** now upgrade to the new version
```shell
make set_version=2.0.x as-current wait
```
* **d)** and test that the upgraded data exists
```shell
make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-upgrade-suite.xml
```

# Environment Settings
Pay attention to the values in the [.env](.env) file. These settings will be picked up in the custom docker-compose.*.yml file(s)
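Since the `as-previous` and `as-current` targets in the upgrade Makefile shown earlier record the resolved images in `image-digests.txt`, a quick way to confirm exactly which digests the upgrade ran against:

```shell
# the as-previous/as-current targets append the resolved image digests here
cat image-digests.txt
```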
@@ -1,10 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
search:
|
||||
environment:
|
||||
- VERSION=${SEARCH_TAG}
|
||||
image: quay.io/alfresco/search-services:${SEARCH_TAG}
|
||||
volumes:
|
||||
- "./upgrade/solr-data:/opt/alfresco-search-services/data"
|
||||
- "./upgrade/solr-contentstore:/opt/alfresco-search-services/contentstore"
|
||||
|
Binary file not shown.
@@ -1,15 +0,0 @@
|
||||
# docker-compose related environments
|
||||
ALFRESCO_IMAGE=alfresco/alfresco-content-repository
|
||||
ALFRESCO_TAG=6.1.0-EA3
|
||||
SHARE_IMAGE=alfresco/alfresco-share
|
||||
SHARE_TAG=6.0
|
||||
POSTGRES_IMAGE=postgres
|
||||
POSTGRES_TAG=10.1
|
||||
SEARCH_IMAGE=quay.io/alfresco/search-services
|
||||
SEARCH_TAG=latest
|
||||
DIST_DIR_PATH=/opt/alfresco-search-services
|
||||
#SEARCH_IMAGE=quay.io/alfresco/insight-engine
|
||||
#SEARCH_TAG=latest
|
||||
#DIST_DIR_PATH=/opt/alfresco-insight-engine
|
||||
ACTIVEMQ_IMAGE=alfresco/alfresco-activemq
|
||||
ACTIVEMQ_TAG=5.15.6
|
@@ -1,39 +0,0 @@
|
||||
include ../Makefile
|
||||
include .env
|
||||
|
||||
# CURRENT_DIR is the folder where this Makefile is saved
|
||||
CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
||||
|
||||
SEARCH_IMAGE ?=quay.io/alfresco/search-services
|
||||
SEARCH_TAG ?=latest # default version that will be used in tasks
|
||||
|
||||
as-previous: clean ## 1 - install the previous version: $ make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=1.2.1 as-previous
|
||||
rm -rf ./solr-data && \
|
||||
rm -rf ./solr-contentstore && \
|
||||
rm -f ./image-digests.txt && \
|
||||
export SEARCH_TAG=$(SEARCH_TAG) && \
|
||||
export SEARCH_IMAGE=$(SEARCH_IMAGE) && \
|
||||
$(dc) pull && \
|
||||
echo "\n====Previous====" > image-digests.txt && \
|
||||
$(dc) config --resolve-image-digests >> image-digests.txt && \
|
||||
$(dc) up -d && \
|
||||
docker ps
|
||||
|
||||
as-current: ## 2 - upgrade previous to this version $ make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=2.0.x as-current
|
||||
$(dc) kill search && \
|
||||
$(dc) rm -f search && \
|
||||
export SEARCH_TAG=$(SEARCH_TAG) && \
|
||||
export SEARCH_IMAGE=$(SEARCH_IMAGE) && \
|
||||
$(dc) pull search && \
|
||||
echo "\n====Current====" >> image-digests.txt && \
|
||||
$(dc) config --resolve-image-digests >> image-digests.txt && \
|
||||
$(dc) up -d search && \
|
||||
docker ps
|
||||
#
|
||||
# Run the following commands if you need to test the upgrade e2e
|
||||
#
|
||||
#make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=1.2.1 as-previous wait
|
||||
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-pre-upgrade-suite.xml
|
||||
|
||||
#make SEARCH_IMAGE=quay.io/alfresco/search-services SEARCH_TAG=2.0.x as-current wait
|
||||
#make run-mvn-tests suiteXmlFile=./src/test/resources/search-post-upgrade-suite.xml
|
@@ -1,66 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
alfresco:
|
||||
image: ${ALFRESCO_IMAGE}:${ALFRESCO_TAG}
|
||||
environment:
|
||||
JAVA_OPTS : "
|
||||
-Ddb.driver=org.postgresql.Driver
|
||||
-Ddb.username=alfresco
|
||||
-Ddb.password=alfresco
|
||||
-Ddb.url=jdbc:postgresql://postgres:5432/alfresco
|
||||
-Dsolr.host=search
|
||||
-Dsolr.port=8983
|
||||
-Dsolr.secureComms=none
|
||||
-Dsolr.base.url=/solr
|
||||
-Dindex.subsystem.name=solr6
|
||||
-Dalfresco.restApi.basicAuthScheme=true
|
||||
-Ddeployment.method=DOCKER_COMPOSE
|
||||
-Dcsrf.filter.enabled=false
|
||||
-Dmessaging.broker.url=\"failover:(nio://activemq:61616)?timeout=3000&jms.useCompression=true\"
|
||||
"
|
||||
ports:
|
||||
- "7203:7203" #JMX connect via service:jmx:rmi:///jndi/rmi://localhost:7203/jmxrmi
|
||||
- "5005:5005" #Java debugging
|
||||
- "8081:8080" #Browser port for Alfresco
|
||||
|
||||
share:
|
||||
image: ${SHARE_IMAGE}:${SHARE_TAG}
|
||||
environment:
|
||||
- REPO_HOST=alfresco
|
||||
- REPO_PORT=8080
|
||||
ports:
|
||||
- 8082:8080 #Browser port for Share
|
||||
|
||||
postgres:
|
||||
image: ${POSTGRES_IMAGE}:${POSTGRES_TAG}
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=alfresco
|
||||
- POSTGRES_USER=alfresco
|
||||
- POSTGRES_DB=alfresco
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
search:
|
||||
image: ${SEARCH_IMAGE}:${SEARCH_TAG}
|
||||
environment:
|
||||
#Solr needs to know how to register itself with Alfresco
|
||||
- SOLR_ALFRESCO_HOST=alfresco
|
||||
- SOLR_ALFRESCO_PORT=8080
|
||||
#Alfresco needs to know how to call solr
|
||||
- SOLR_SOLR_HOST=search
|
||||
- SOLR_SOLR_PORT=8983
|
||||
#Create the default alfresco and archive cores
|
||||
- SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
|
||||
ports:
|
||||
- 8083:8983 #Browser port
|
||||
volumes:
|
||||
- "./upgrade/solr-data:${DIST_DIR_PATH}/data"
|
||||
- "./upgrade/solr-contentstore:/opt/${DIST_DIR_PATH}/contentstore"
|
||||
|
||||
activemq:
|
||||
image: ${ACTIVEMQ_IMAGE}:${ACTIVEMQ_TAG}
|
||||
ports:
|
||||
- 8161:8161 # Web Console
|
||||
- 5672:5672 # AMQP
|
||||
- 61616:61616 # OpenWire
|
||||
- 61613:61613 # STOMP
|
@@ -44,6 +44,8 @@ public class TestGroup
|
||||
|
||||
public static final String CONFIG_ENABLED_CASCADE_TRACKER ="Config_Enabled_Cascade_Tracker"; // Alfresco search services does not index fields related to cascaded updates
|
||||
|
||||
public static final String CROSS_LOCALE_SUPPORT_DISABLED = "CROSS_LOCALE_SUPPORT_DISABLED";
|
||||
|
||||
public static final String NOT_INSIGHT_ENGINE = "Not_InsightEngine"; // When Alfresco Insight Engine 1.0 isn't running
|
||||
|
||||
public static final String ACS_52n = "ACS_52n"; // Alfresco Content Services 5.2.n
|
||||
@@ -52,6 +54,7 @@ public class TestGroup
|
||||
public static final String ACS_611n = "ACS_611n"; // Alfresco Content Services 6.1.1 or above
|
||||
public static final String ACS_62n = "ACS_62n"; // Alfresco Content Services 6.2 or above
|
||||
public static final String ACS_63n = "ACS_63n"; // Alfresco Content Services 6.3 or above
|
||||
public static final String ACS_701n = "ACS_701n"; // Alfresco Content Services 7.0.1 or above
|
||||
|
||||
public static final String AGS_302 = "AGS_302"; // Alfresco governance Services 3.0.2 or above
|
||||
}
|
||||
|
@@ -126,6 +126,18 @@
|
||||
<facetable>true</facetable>
|
||||
</index>
|
||||
</property>
|
||||
<property name="allfieldtypes:multiplemltext">
|
||||
<title>mltextMultiple</title>
|
||||
<type>d:mltext</type>
|
||||
<mandatory>false</mandatory>
|
||||
<multiple>true</multiple>
|
||||
<index enabled="true">
|
||||
<atomic>true</atomic>
|
||||
<stored>false</stored>
|
||||
<tokenised>both</tokenised>
|
||||
<facetable>true</facetable>
|
||||
</index>
|
||||
</property>
|
||||
<property name="allfieldtypes:textPatternMany">
|
||||
<title>textPatternMany</title>
|
||||
<type>d:text</type>
|
||||
|
36
e2e-test/src/main/resources/model/tokenised-model.xml
Normal file
@@ -0,0 +1,36 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<model xmlns="http://www.alfresco.org/model/dictionary/1.0" name="tok:tok">
|
||||
<imports>
|
||||
<import uri="http://www.alfresco.org/model/content/1.0" prefix="cm"/>
|
||||
<import uri="http://www.alfresco.org/model/dictionary/1.0" prefix="d"/>
|
||||
</imports>
|
||||
<namespaces>
|
||||
<namespace uri="http://www.alfresco.org/model/tokenised/1.0" prefix="tok"/>
|
||||
</namespaces>
|
||||
<types>
|
||||
<type name="tok:document">
|
||||
<parent>cm:content</parent>
|
||||
<properties>
|
||||
<property name="tok:true">
|
||||
<type>d:text</type>
|
||||
<index enabled="true">
|
||||
<tokenised>TRUE</tokenised>
|
||||
</index>
|
||||
</property>
|
||||
<property name="tok:false">
|
||||
<type>d:text</type>
|
||||
<index enabled="true">
|
||||
<tokenised>FALSE</tokenised>
|
||||
</index>
|
||||
</property>
|
||||
<property name="tok:both">
|
||||
<type>d:text</type>
|
||||
<index enabled="true">
|
||||
<tokenised>BOTH</tokenised>
|
||||
</index>
|
||||
</property>
|
||||
</properties>
|
||||
</type>
|
||||
</types>
|
||||
<aspects/>
|
||||
</model>
|
@@ -2,23 +2,23 @@
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2022 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
@@ -43,6 +43,7 @@ import org.alfresco.dataprep.ContentService;
|
||||
import org.alfresco.dataprep.SiteService.Visibility;
|
||||
import org.alfresco.rest.core.RestProperties;
|
||||
import org.alfresco.rest.core.RestWrapper;
|
||||
import org.alfresco.rest.exception.EmptyJsonResponseException;
|
||||
import org.alfresco.rest.exception.EmptyRestModelCollectionException;
|
||||
import org.alfresco.rest.model.RestRequestSpellcheckModel;
|
||||
import org.alfresco.rest.search.Pagination;
|
||||
@@ -83,7 +84,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringContextTests
|
||||
{
|
||||
/** The number of retries that a query will be tried before giving up. */
|
||||
protected static final int SEARCH_MAX_ATTEMPTS = 6;
|
||||
protected static final int SEARCH_MAX_ATTEMPTS = 120;
|
||||
|
||||
private static final Logger LOGGER = LogFactory.getLogger();
|
||||
|
||||
@@ -120,7 +121,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
protected SiteModel testSite, testSite2;
|
||||
|
||||
protected static String unique_searchString;
|
||||
|
||||
|
||||
protected static String shardingMethod = "DB_ID";
|
||||
protected int shardCount = 0;
|
||||
|
||||
@@ -294,7 +295,6 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
*/
|
||||
public boolean isContentInSearchResults(String userQuery, String contentToFind, boolean expectedInResults) {
|
||||
|
||||
String expectedStatusCode = HttpStatus.OK.toString();
|
||||
String contentName = (contentToFind == null) ? "" : contentToFind;
|
||||
|
||||
SearchRequest searchRequest = createQuery(userQuery);
|
||||
@@ -302,29 +302,38 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
// Repeat search until the query results are as expected or Search Retry count is hit
|
||||
for (int searchCount = 0; searchCount < SEARCH_MAX_ATTEMPTS; searchCount++)
|
||||
{
|
||||
SearchResponse response = query(searchRequest);
|
||||
|
||||
if (restClient.getStatusCode().matches(expectedStatusCode))
|
||||
try
|
||||
{
|
||||
boolean found = isContentInSearchResponse(response, contentName);
|
||||
|
||||
// Exit loop if result is as expected.
|
||||
if (expectedInResults == found)
|
||||
if (expectedInResults == isContentFoundWithRequest(searchRequest, contentName))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Wait for the solr indexing (eventual consistency).
|
||||
Utility.waitToLoopTime(properties.getSolrWaitTimeInSeconds(), "Wait For Indexing. Retry Attempt: " + (searchCount + 1));
|
||||
}
|
||||
else
|
||||
catch (EmptyJsonResponseException ignore)
|
||||
{
|
||||
throw new RuntimeException("API returned status code:" + restClient.getStatusCode() + " Expected: " + expectedStatusCode);
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean isContentFoundWithRequest(SearchRequest searchRequest, String contentName)
|
||||
{
|
||||
SearchResponse response = query(searchRequest);
|
||||
|
||||
if (restClient.getStatusCode().matches(HttpStatus.OK.toString()))
|
||||
{
|
||||
return isContentInSearchResponse(response, contentName);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new RuntimeException("API returned status code:" + restClient.getStatusCode() + " Expected: " + HttpStatus.OK + "; Response body: " + response);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method to check if the contentName is returned in the SearchResponse.
|
||||
*
|
||||
@@ -439,7 +448,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
{
|
||||
SearchRequest searchRequest = new SearchRequest();
|
||||
searchRequest.setQuery(queryModel);
|
||||
|
||||
|
||||
if (ofNullable(paging).isPresent())
|
||||
{
|
||||
searchRequest.setPaging(paging);
|
||||
@@ -508,7 +517,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
|
||||
// Include lists in failure message as TestNG won't do this for lists.
|
||||
assertEquals(names, expectedNames, "Unexpected results for query: " + query + " Expected: " + expectedNames + " but got " + names);
|
||||
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
@@ -522,11 +531,11 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
protected SearchResponse testSearchQueryUnordered(String query, Set<String> expectedNames, SearchLanguage queryLanguage)
|
||||
{
|
||||
SearchResponse response = performSearch(testUser, query, queryLanguage, getDefaultPagingOptions());
|
||||
|
||||
|
||||
Set<String> names = response.getEntries().stream().map(s -> s.getModel().getName()).collect(Collectors.toSet());
|
||||
|
||||
|
||||
assertEquals(names, expectedNames, "Unexpected results for query: " + query);
|
||||
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
@@ -553,7 +562,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
/**
|
||||
* Returns pagination object with alfresco default settings
|
||||
* Sets skipCount = 0, maxItems = 100
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private Pagination getDefaultPagingOptions()
|
||||
@@ -574,7 +583,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
protected Pagination setPaging(Integer skipCount, Integer maxItems)
|
||||
{
|
||||
Pagination paging = new Pagination();
|
||||
|
||||
|
||||
if (ofNullable(skipCount).isPresent())
|
||||
{
|
||||
paging.setSkipCount(skipCount);
|
||||
@@ -584,7 +593,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
{
|
||||
paging.setMaxItems(maxItems);
|
||||
}
|
||||
|
||||
|
||||
return paging;
|
||||
}
|
||||
|
||||
@@ -645,7 +654,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
public String getShardMethod() throws JsonProcessingException, EmptyRestModelCollectionException
|
||||
{
|
||||
RestShardInfoModelCollection info = getShardInfo();
|
||||
|
||||
|
||||
return shardingMethod = ofNullable(info)
|
||||
.map(RestShardInfoModelCollection::getEntries)
|
||||
.map(Collection::iterator).filter(Iterator::hasNext)
|
||||
@@ -664,7 +673,7 @@ public abstract class AbstractE2EFunctionalTest extends AbstractTestNGSpringCont
|
||||
public int getShardCount() throws JsonProcessingException, EmptyRestModelCollectionException
|
||||
{
|
||||
RestShardInfoModelCollection info = getShardInfo();
|
||||
|
||||
|
||||
return shardCount = ofNullable(info)
|
||||
.map(RestShardInfoModelCollection::getEntries)
|
||||
.map(Collection::iterator)
|
||||
|
@@ -0,0 +1,176 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.test.search.functional;
|
||||
|
||||
import static com.google.common.collect.ImmutableMap.of;
|
||||
import static java.util.Arrays.asList;
|
||||
import static java.util.stream.IntStream.range;
|
||||
|
||||
import java.text.DateFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.ZoneId;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.utility.constants.UserRole;
|
||||
import org.alfresco.utility.data.DataContent;
|
||||
import org.alfresco.utility.data.DataSite;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FileType;
|
||||
import org.alfresco.utility.model.FolderModel;
|
||||
import org.apache.chemistry.opencmis.commons.PropertyIds;
|
||||
import org.apache.chemistry.opencmis.commons.enums.VersioningState;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
|
||||
/**
|
||||
* Base corpus for Exact Term tests.
|
||||
* SearchExactTerm tests, with and without cross locale configuration, use this corpus
|
||||
* so results can be compared.
|
||||
*/
|
||||
public abstract class AbstractSearchExactTermTest extends AbstractE2EFunctionalTest
|
||||
{
|
||||
@Autowired
|
||||
protected DataSite dataSite;
|
||||
|
||||
@Autowired
|
||||
protected DataContent dataContent;
|
||||
|
||||
private static final DateFormat QUERY_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
|
||||
protected String fromDate;
|
||||
protected String toDate;
|
||||
|
||||
private void prepareExactSearchData(FolderModel testFolder) throws Exception {
|
||||
|
||||
List<Map<String, String>> exactSearchData = asList(
|
||||
// Document #1
|
||||
of("name", "Running",
|
||||
"description", "Running is a sport is a nice activity",
|
||||
"content", "when you are running you are doing an amazing sport",
|
||||
"title", "Running jumping"),
|
||||
// Document #2
|
||||
of("name", "Run",
|
||||
"description", "you are supposed to run jump",
|
||||
"content", "after many runs you are tired and if you jump it happens the same",
|
||||
"title", "Run : a philosophy"),
|
||||
// Document #3
|
||||
of("name", "Poetry",
|
||||
"description", "a document about poetry and jumpers",
|
||||
"content", "poetry is unrelated to sport",
|
||||
"title", "Running jumping twice jumpers"),
|
||||
// Document #4
|
||||
of("name", "Jump",
|
||||
"description", "a document about jumps",
|
||||
"content", "runnings jumpings",
|
||||
"title", "Running"),
|
||||
// Document #5
|
||||
of("name", "Running jumping",
|
||||
"description", "runners jumpers runs everywhere",
|
||||
"content", "run is Good as jump",
|
||||
"title", "Running the art of jumping"));
|
||||
|
||||
// tok:true, tok:false and tok:both have a copy of the value in cm:title field
|
||||
|
||||
List<FileModel> createdFileModels = new ArrayList<>();
|
||||
range(0, exactSearchData.size())
|
||||
.forEach(id -> {
|
||||
|
||||
Map<String, String> record = exactSearchData.get(id);
|
||||
|
||||
Map<String, Object> properties = new HashMap<>();
|
||||
properties.put(PropertyIds.OBJECT_TYPE_ID, "D:tok:document");
|
||||
properties.put(PropertyIds.NAME, record.get("name"));
|
||||
properties.put("cm:title", record.get("title"));
|
||||
properties.put("cm:description", record.get("description"));
|
||||
properties.put("tok:true", record.get("title"));
|
||||
properties.put("tok:false", record.get("title"));
|
||||
properties.put("tok:both", record.get("title"));
|
||||
properties.put(PropertyIds.SECONDARY_OBJECT_TYPE_IDS, List.of("P:cm:titled"));
|
||||
|
||||
FileModel fileModel = FileModel.getRandomFileModel(FileType.TEXT_PLAIN, record.get("content"));
|
||||
fileModel.setName(record.get("name"));
|
||||
|
||||
cmisApi.authenticateUser(testUser).usingSite(testSite).usingResource(testFolder)
|
||||
.createFile(fileModel, properties, VersioningState.MAJOR)
|
||||
.assertThat().existsInRepo();
|
||||
|
||||
createdFileModels.add(fileModel);
|
||||
|
||||
});
|
||||
|
||||
waitForContentIndexing(createdFileModels.get(createdFileModels.size() - 1).getName(), true);
|
||||
|
||||
}
|
||||
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
serverHealth.assertServerIsOnline();
|
||||
|
||||
deployCustomModel("model/tokenised-model.xml");
|
||||
|
||||
dataUser.addUserToSite(testUser, testSite, UserRole.SiteContributor);
|
||||
|
||||
FolderModel testFolder = dataContent.usingSite(testSite).usingUser(testUser).createFolder();
|
||||
prepareExactSearchData(testFolder);
|
||||
|
||||
// Calculate time query range, required for conjunction queries
|
||||
Date today = new Date();
|
||||
|
||||
LocalDateTime yesterday = today.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
|
||||
yesterday = yesterday.plusDays(-1);
|
||||
fromDate = QUERY_DATE_FORMAT.format(Date.from(yesterday.atZone(ZoneId.systemDefault()).toInstant()));
|
||||
|
||||
LocalDateTime tomorrow = today.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
|
||||
tomorrow = tomorrow.plusDays(1);
|
||||
toDate = QUERY_DATE_FORMAT.format(Date.from(tomorrow.atZone(ZoneId.systemDefault()).toInstant()));
|
||||
|
||||
}
|
||||
|
||||
protected void assertResponseCardinality(String query, int num)
|
||||
{
|
||||
SearchResponse response = queryAsUser(testUser, query);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response.getPagination().getCount(), num, query);
|
||||
}
|
||||
|
||||
protected void assertException(String query)
|
||||
{
|
||||
queryAsUser(testUser, query);
|
||||
Assert.assertTrue(
|
||||
restClient.getStatusCode().equals(String.valueOf(HttpStatus.NOT_IMPLEMENTED)) ||
|
||||
restClient.getStatusCode().equals(String.valueOf(HttpStatus.INTERNAL_SERVER_ERROR)),
|
||||
"Status code is not as expected.");
|
||||
}
|
||||
|
||||
}
|
@@ -2,23 +2,23 @@
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2023 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
@@ -26,10 +26,16 @@
|
||||
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import org.alfresco.rest.exception.EmptyJsonResponseException;
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.test.search.functional.AbstractE2EFunctionalTest;
|
||||
import org.alfresco.utility.Utility;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FileType;
|
||||
import org.alfresco.utility.model.FolderModel;
|
||||
import org.alfresco.utility.model.UserModel;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.Assert;
|
||||
|
||||
import static java.util.List.of;
|
||||
|
||||
@@ -46,6 +52,15 @@ import static java.util.List.of;
|
||||
public abstract class AbstractSearchServicesE2ETest extends AbstractE2EFunctionalTest
|
||||
{
|
||||
private static final String SEARCH_DATA_SAMPLE_FOLDER = "FolderSearch";
|
||||
private static final int MAX_ATTEMPTS_TO_RETRY_QUERY = 10;
|
||||
private static final int MAX_WAIT_IN_SECONDS_BEFORE_RETRY_QUERY = 5;
|
||||
private static final int MAX_ATTEMPTS_TO_READ_RESPONSE = 10;
|
||||
private static final int MAX_WAIT_IN_SECONDS_BEFORE_REREAD_RESPONSE = 2;
|
||||
|
||||
/** The maximum time to wait for content indexing to complete (in ms). */
|
||||
private static final int MAX_TIME = 120 * 1000;
|
||||
/** The frequency to check the report (in ms). */
|
||||
private static final int RETRY_INTERVAL = 30000;
|
||||
|
||||
protected FileModel file, file2, file3, file4;
|
||||
protected FolderModel folder;
|
||||
@@ -71,20 +86,73 @@ public abstract class AbstractSearchServicesE2ETest extends AbstractE2EFunctiona
|
||||
file = new FileModel("pangram.txt", "pangram" + title, description, FileType.TEXT_PLAIN,
|
||||
description + " The quick brown fox jumps over the lazy dog");
|
||||
|
||||
file2 = new FileModel("cars.txt", "cars" + title, description, FileType.TEXT_PLAIN,
|
||||
"The landrover discovery is not a sports car ");
|
||||
file2 = new FileModel("cars.PDF", "cars", description, FileType.TEXT_PLAIN,
|
||||
"The landrover discovery is not a sports car");
|
||||
|
||||
file3 = new FileModel("alfresco.txt", "alfresco", "alfresco", FileType.TEXT_PLAIN,
|
||||
file3 = new FileModel("alfresco.docx", "alfresco", "alfresco", FileType.TEXT_PLAIN,
|
||||
"Alfresco text file for search ");
|
||||
|
||||
file4 = new FileModel(unique_searchString + ".txt", "uniquee" + title, description, FileType.TEXT_PLAIN,
|
||||
file4 = new FileModel(unique_searchString + ".ODT", "uniquee" + title, description, FileType.TEXT_PLAIN,
|
||||
"Unique text file for search ");
|
||||
|
||||
|
||||
of(file, file2, file3, file4).forEach(
|
||||
f -> dataContent.usingUser(testUser).usingSite(testSite).usingResource(folder).createContent(f)
|
||||
);
|
||||
|
||||
);
|
||||
waitForMetadataIndexing(file4.getName(), true);
|
||||
}
|
||||
|
||||
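/**
* Creates a text file containing the provided string in the shared test folder and waits for the
* content to be indexed before returning, so callers can query for it straight away.
*/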
protected FileModel createFileWithProvidedText(String filename, String providedText) throws InterruptedException
|
||||
{
|
||||
String title = "Title: File containing " + providedText;
|
||||
String description = "Description: Contains provided string: " + providedText;
|
||||
FileModel uniqueFile = new FileModel(filename, title, description, FileType.TEXT_PLAIN,
|
||||
"The content " + providedText + " is a provided string");
|
||||
dataContent.usingUser(testUser).usingSite(testSite).usingResource(folder).createContent(uniqueFile);
|
||||
Assert.assertTrue(waitForContentIndexing(providedText, true));
|
||||
|
||||
return uniqueFile;
|
||||
}
|
||||
|
||||
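/**
* Runs the query as the given user and, once an OK status is returned, re-reads the response a limited
* number of times until the entries list is no longer empty (the index is eventually consistent).
*/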
protected SearchResponse queryUntilResponseEntriesListNotEmpty(UserModel user, String queryString)
|
||||
{
|
||||
SearchResponse response = queryUntilStatusIsOk(user, queryString);
|
||||
if (restClient.getStatusCode().matches(HttpStatus.OK.toString()))
|
||||
{
|
||||
for (int readAttempts = 0; readAttempts < MAX_ATTEMPTS_TO_READ_RESPONSE; readAttempts++)
|
||||
{
|
||||
if (!response.isEmpty())
|
||||
{
|
||||
return response;
|
||||
}
|
||||
Utility.waitToLoopTime(MAX_WAIT_IN_SECONDS_BEFORE_REREAD_RESPONSE, "Re-reading empty response. Retry Attempt: " + (readAttempts + 1));
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
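/** Repeats the query until HTTP 200 is returned or the retry limit is reached; empty JSON responses are ignored and retried. */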
private SearchResponse queryUntilStatusIsOk(UserModel user, String queryString)
|
||||
{
|
||||
// Repeat query until status is OK or Query Retry limit is hit
|
||||
for (int queryAttempts = 0; queryAttempts < MAX_ATTEMPTS_TO_RETRY_QUERY - 1; queryAttempts++)
|
||||
{
|
||||
try
|
||||
{
|
||||
SearchResponse response = queryAsUser(user, queryString);
|
||||
if (restClient.getStatusCode().matches(HttpStatus.OK.toString()))
|
||||
{
|
||||
return response;
|
||||
}
|
||||
|
||||
// Wait for pipeline to calm down
|
||||
Utility.waitToLoopTime(MAX_WAIT_IN_SECONDS_BEFORE_RETRY_QUERY, "Re-trying query for valid status code. Retry Attempt: " + (queryAttempts + 1));
|
||||
}
|
||||
catch (EmptyJsonResponseException ignore)
|
||||
{
|
||||
}
|
||||
}
|
||||
// Final attempt
|
||||
return queryAsUser(user, queryString);
|
||||
}
|
||||
}
|
||||
|
@@ -150,7 +150,7 @@ public class FacetIntervalSearchTest extends AbstractSearchServicesE2ETest
|
||||
bucket = facetResponseModel.getBuckets().get(1);
|
||||
|
||||
bucket.assertThat().field("label").is("theRest");
|
||||
bucket.assertThat().field("filterQuery").is("creator:<\"user\" TO \"z\"]");
|
||||
bucket.assertThat().field("filterQuery").is("creator:[\"user\" TO \"z\"]");
|
||||
bucket.getMetrics().get(0).assertThat().field("type").is("count");
|
||||
bucket.getMetrics().get(0).assertThat().field("value").is("{count=0}");
|
||||
}
|
||||
@@ -196,7 +196,7 @@ public class FacetIntervalSearchTest extends AbstractSearchServicesE2ETest
|
||||
bucket = facetResponseModel.getBuckets().get(1);
|
||||
|
||||
bucket.assertThat().field("label").is("Before2016");
|
||||
bucket.assertThat().field("filterQuery").is("cm:modified:[\"*\" TO \"2016\">");
|
||||
bucket.assertThat().field("filterQuery").is("cm:modified:[\"*\" TO \"2016\"]");
|
||||
bucket.getMetrics().get(0).assertThat().field("type").is("count");
|
||||
bucket.getMetrics().get(0).assertThat().field("value").is("{count=0}");
|
||||
}
|
||||
|
@@ -27,7 +27,9 @@
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.rest.search.FacetFieldBucket;
|
||||
import org.alfresco.rest.search.FacetQuery;
|
||||
@@ -40,8 +42,12 @@ import org.alfresco.rest.search.RestResultBucketsModel;
|
||||
import org.alfresco.rest.search.SearchRequest;
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.search.TestGroup;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FileType;
|
||||
import org.alfresco.utility.testrail.ExecutionType;
|
||||
import org.alfresco.utility.testrail.annotation.TestRail;
|
||||
import org.apache.chemistry.opencmis.commons.PropertyIds;
|
||||
import org.apache.chemistry.opencmis.commons.enums.VersioningState;
|
||||
import org.testng.Assert;
|
||||
import org.testng.TestException;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
@@ -104,7 +110,7 @@ public class FacetedSearchTest extends AbstractSearchServicesE2ETest
|
||||
* }}
|
||||
*/
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation() throws Exception
|
||||
public void dataPreparation()
|
||||
{
|
||||
searchServicesDataPreparation();
|
||||
waitForContentIndexing(file4.getContent(), true);
|
||||
@@ -112,7 +118,7 @@ public class FacetedSearchTest extends AbstractSearchServicesE2ETest
|
||||
|
||||
@Test(groups={TestGroup.CONFIG_ENABLED_CASCADE_TRACKER})
|
||||
@TestRail(section = { TestGroup.REST_API, TestGroup.SEARCH}, executionType = ExecutionType.REGRESSION, description = "Checks facet queries for the Search api")
|
||||
public void searchWithQueryFaceting() throws Exception
|
||||
public void searchWithQueryFaceting()
|
||||
{
|
||||
SearchRequest query = new SearchRequest();
|
||||
RestRequestQueryModel queryReq = new RestRequestQueryModel();
|
||||
@@ -139,16 +145,22 @@ public class FacetedSearchTest extends AbstractSearchServicesE2ETest
|
||||
FacetFieldBucket facet = response.getContext().getFacetQueries().get(0);
|
||||
facet.assertThat().field("label").contains("small").and().field("count").isGreaterThan(0);
|
||||
facet.assertThat().field("label").contains("small").and().field("filterQuery").is("content.size:[0 TO 102400]");
|
||||
response.getContext().getFacetQueries().get(1).assertThat().field("label").contains("large")
|
||||
.and().field("count").isLessThan(1)
|
||||
.and().field("filterQuery").is("content.size:[1048576 TO 16777216]");
|
||||
response.getContext().getFacetQueries().get(2).assertThat().field("label").contains("medium")
|
||||
.and().field("count").isLessThan(1)
|
||||
.and().field("filterQuery").is("content.size:[102400 TO 1048576]");
|
||||
//We don't expect to see the FacetFields if group is being used.
|
||||
Assert.assertEquals(response.getContext().getFacetQueries().size(), 1, "Results with count=0 must be omitted");
|
||||
|
||||
// We don't expect to see the FacetFields if group is being used.
|
||||
Assert.assertNull(response.getContext().getFacetsFields());
|
||||
Assert.assertNull(response.getContext().getFacets());
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify this query is returning the same results for both single server and shard environments.
|
||||
*/
|
||||
@Test(groups={TestGroup.CONFIG_SHARDING})
|
||||
@TestRail(section = { TestGroup.REST_API, TestGroup.SEARCH}, executionType = ExecutionType.ACCEPTANCE, description = "Checks facet queries for the Search api in Shard environments")
|
||||
public void searchWithQueryFacetingCluster()
|
||||
{
|
||||
searchWithQueryFaceting();
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a group-by faceting; the test below groups the facet by group name foo.
|
||||
@@ -192,7 +204,7 @@ public class FacetedSearchTest extends AbstractSearchServicesE2ETest
|
||||
@Test
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.SEARCH }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Checks facet queries for the Search api")
|
||||
public void searchQueryFacetingWithGroup() throws Exception
|
||||
public void searchQueryFacetingWithGroup()
|
||||
{
|
||||
SearchRequest query = new SearchRequest();
|
||||
RestRequestQueryModel queryReq = new RestRequestQueryModel();
|
||||
@@ -257,7 +269,7 @@ public class FacetedSearchTest extends AbstractSearchServicesE2ETest
|
||||
@Test
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.SEARCH }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Checks facet queries for the Search api")
|
||||
public void searchWithFactedFields() throws Exception
|
||||
public void searchWithFactedFields()
|
||||
{
|
||||
SearchRequest query = new SearchRequest();
|
||||
RestRequestQueryModel queryReq = new RestRequestQueryModel();
|
||||
@@ -303,7 +315,7 @@ public class FacetedSearchTest extends AbstractSearchServicesE2ETest
|
||||
@Test
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.SEARCH }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Checks facet queries for the Search api")
|
||||
public void searchWithFactedFieldsFacetFormatV2() throws Exception
|
||||
public void searchWithFactedFieldsFacetFormatV2()
|
||||
{
|
||||
SearchRequest query = new SearchRequest();
|
||||
RestRequestQueryModel queryReq = new RestRequestQueryModel();
|
||||
@@ -330,6 +342,94 @@ public class FacetedSearchTest extends AbstractSearchServicesE2ETest
|
||||
bucket1.assertThat().field("label").is(testUser.getUsername());
|
||||
bucket1.assertThat().field("display").is("FN-" + testUser.getUsername() + " LN-" + testUser.getUsername());
|
||||
bucket1.assertThat().field("filterQuery").is("modifier:\"" + testUser.getUsername() + "\"");
|
||||
bucket1.assertThat().field("metrics").is("[{entry=null, type=count, value={count=1}}]");
|
||||
bucket1.assertThat().field("metrics.entry").is("[null]")
|
||||
.and().field("metrics.type").is("[count]")
|
||||
.and().field("metrics.value").is("[{count=1}]");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that facet fields return results for single and multivalued fields.
|
||||
* {
|
||||
* "query": {
|
||||
* "query": "cm:addressee:'first'"
|
||||
* },
|
||||
* "facetFields": {
|
||||
* "facets": [{"field": "cm:addressee"}, {"field": "cm:addressees"}]
|
||||
* },
|
||||
* "facetFormat":"V2"
|
||||
* }
|
||||
*/
|
||||
@Test
|
||||
@TestRail(section = {TestGroup.REST_API, TestGroup.SEARCH }, executionType = ExecutionType.REGRESSION,
|
||||
description = "Checks facet queries for the Search api, single and multi-valued properties")
|
||||
public void searchWithMultiValuedFieldsFacet()
|
||||
{
|
||||
|
||||
// Create properties with single (cm:addressee) and multi-valued (cm:addressees) values
|
||||
FileModel emailFile = FileModel.getRandomFileModel(FileType.TEXT_PLAIN, "Email");
|
||||
|
||||
Map<String, Object> properties = new HashMap<>();
|
||||
properties.put(PropertyIds.OBJECT_TYPE_ID, "cmis:document");
|
||||
properties.put(PropertyIds.NAME, emailFile.getName());
|
||||
properties.put(PropertyIds.SECONDARY_OBJECT_TYPE_IDS, List.of("P:cm:emailed"));
|
||||
properties.put("cm:addressee", "first");
|
||||
properties.put("cm:addressees", List.of("first", "second"));
|
||||
|
||||
cmisApi.authenticateUser(testUser)
|
||||
.usingSite(testSite)
|
||||
.usingResource(folder)
|
||||
.createFile(emailFile, properties, VersioningState.MAJOR)
|
||||
.assertThat().existsInRepo();
|
||||
|
||||
String addresseeQuery = "cm:addressee:'first'";
|
||||
Assert.assertTrue(waitForIndexing(addresseeQuery, true));
|
||||
|
||||
// Search facet fields cm:addressee and cm:addressees
|
||||
SearchRequest query = new SearchRequest();
|
||||
RestRequestQueryModel queryReq = new RestRequestQueryModel();
|
||||
queryReq.setQuery("cm:addressee:'first'");
|
||||
query.setQuery(queryReq);
|
||||
query.setFacetFormat("V2");
|
||||
RestRequestFacetFieldsModel facetFields = new RestRequestFacetFieldsModel();
|
||||
List<RestRequestFacetFieldModel> facets = new ArrayList<>();
|
||||
facets.add(new RestRequestFacetFieldModel("cm:addressee"));
|
||||
facets.add(new RestRequestFacetFieldModel("cm:addressees"));
|
||||
facetFields.setFacets(facets);
|
||||
query.setFacetFields(facetFields);
|
||||
|
||||
SearchResponse response = query(query);
|
||||
|
||||
// Verify results
|
||||
Assert.assertNull(response.getContext().getFacetsFields());
|
||||
Assert.assertNull(response.getContext().getFacetQueries());
|
||||
Assert.assertFalse(response.getContext().getFacets().isEmpty());
|
||||
|
||||
// Facets for cm:addressees (multi-valued)
|
||||
RestGenericFacetResponseModel model = response.getContext().getFacets().get(0);
|
||||
Assert.assertEquals(model.getLabel(), "cm:addressees");
|
||||
model.assertThat().field("label").is("cm:addressees");
|
||||
RestGenericBucketModel bucket = model.getBuckets().get(0);
|
||||
bucket.assertThat().field("label").is("{en}first");
|
||||
bucket.assertThat().field("filterQuery").is("cm:addressees:\"{en}first\"");
|
||||
bucket.assertThat().field("metrics.entry").is("[null]")
|
||||
.and().field("metrics.type").is("[count]")
|
||||
.and().field("metrics.value").is("[{count=1}]");
|
||||
bucket = model.getBuckets().get(1);
|
||||
bucket.assertThat().field("label").is("{en}second");
|
||||
bucket.assertThat().field("filterQuery").is("cm:addressees:\"{en}second\"");
|
||||
bucket.assertThat().field("metrics.entry").is("[null]")
|
||||
.and().field("metrics.type").is("[count]")
|
||||
.and().field("metrics.value").is("[{count=1}]");
|
||||
|
||||
// Facets for cm:addressee (single-valued)
|
||||
model = response.getContext().getFacets().get(1);
|
||||
Assert.assertEquals(model.getLabel(), "cm:addressee");
|
||||
model.assertThat().field("label").is("cm:addressee");
|
||||
bucket = model.getBuckets().get(0);
|
||||
bucket.assertThat().field("label").is("{en}first");
|
||||
bucket.assertThat().field("filterQuery").is("cm:addressee:\"{en}first\"");
|
||||
bucket.assertThat().field("metrics.entry").is("[null]")
|
||||
.and().field("metrics.type").is("[count]")
|
||||
.and().field("metrics.value").is("[{count=1}]");
|
||||
}
|
||||
}
|
||||
|
@@ -26,6 +26,9 @@
|
||||
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.utility.data.CustomObjectTypeProperties;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
@@ -40,7 +43,7 @@ import org.testng.annotations.Test;
|
||||
*/
|
||||
public class FieldDefinitionTest extends AbstractSearchServicesE2ETest {
|
||||
|
||||
private FileModel File1, File2;
|
||||
private FileModel File1, File2, File3;
|
||||
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation() throws Exception
|
||||
@@ -66,9 +69,23 @@ public class FieldDefinitionTest extends AbstractSearchServicesE2ETest {
|
||||
.updateProperty("allfieldtypes:textPatternMany", "mltext field definition test")
|
||||
.updateProperty("allfieldtypes:textLOVWhole", "text field not tokenised")
|
||||
.updateProperty("allfieldtypes:mltextLOVWhole", "mltext field not tokenised");
|
||||
|
||||
|
||||
File3 = new FileModel("standard-file3.txt");
|
||||
|
||||
dataContent.usingUser(testUser).usingSite(testSite).createCustomContent(File3, "cmis:document",
|
||||
new CustomObjectTypeProperties());
|
||||
|
||||
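// Multi-valued mltext values used by the multiple mltext test below, which checks that each value can be found.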
List<String> mlMultipleValue = new ArrayList<String>();
|
||||
mlMultipleValue.add("oranges");
|
||||
mlMultipleValue.add("apples");
|
||||
mlMultipleValue.add("pears");
|
||||
|
||||
cmisApi.authenticateUser(testUser).usingResource(File3).addSecondaryTypes("P:allfieldtypes:text")
|
||||
.updateProperty("allfieldtypes:multiplemltext", mlMultipleValue);
|
||||
|
||||
waitForMetadataIndexing(File1.getName(), true);
|
||||
waitForMetadataIndexing(File2.getName(), true);
|
||||
waitForMetadataIndexing(File3.getName(), true);
|
||||
}
|
||||
|
||||
// A test to test the text field in the solr schema, using a single word
|
||||
@@ -198,4 +215,21 @@ public class FieldDefinitionTest extends AbstractSearchServicesE2ETest {
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response.getPagination().getCount(), 0);
|
||||
}
|
||||
|
||||
// A test for a multi-valued mltext field in the solr schema
|
||||
@Test(priority = 8)
|
||||
public void testmlTextFieldMuliple()
|
||||
{
|
||||
SearchResponse response = queryAsUser(testUser, "allfieldtypes_multiplemltext:\"orange\"");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response.getPagination().getCount(), 1);
|
||||
|
||||
response = queryAsUser(testUser, "allfieldtypes_multiplemltext:\"apple\"");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response.getPagination().getCount(), 1);
|
||||
|
||||
response = queryAsUser(testUser, "allfieldtypes_multiplemltext:\"pear\"");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response.getPagination().getCount(), 1);
|
||||
}
|
||||
}
|
||||
|
@@ -29,6 +29,8 @@ package org.alfresco.test.search.functional.searchServices.search;
|
||||
import static java.util.List.of;
|
||||
|
||||
import static jersey.repackaged.com.google.common.collect.Sets.newHashSet;
|
||||
import static org.alfresco.search.TestGroup.ACS_701n;
|
||||
import static org.alfresco.search.TestGroup.CROSS_LOCALE_SUPPORT_DISABLED;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
@@ -37,12 +39,15 @@ import java.util.stream.Stream;
|
||||
|
||||
import org.alfresco.rest.model.RestNodeAssociationModelCollection;
|
||||
import org.alfresco.rest.model.RestNodeChildAssociationModel;
|
||||
import org.alfresco.rest.search.RestRequestQueryModel;
|
||||
import org.alfresco.rest.search.SearchRequest;
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.search.TestGroup;
|
||||
import org.alfresco.test.search.functional.AbstractE2EFunctionalTest;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FileType;
|
||||
import org.alfresco.utility.model.FolderModel;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
@@ -137,13 +142,13 @@ public class SearchAFTSInFieldTest extends AbstractE2EFunctionalTest
|
||||
boolean fileFound = isContentInSearchResults(query, file2.getName(), true);
|
||||
Assert.assertTrue(fileFound, "File Not found for query: " + query);
|
||||
|
||||
testSearchQuery(query, 2, SearchLanguage.AFTS);
|
||||
testSearchQuery(query, 1, SearchLanguage.AFTS);
|
||||
|
||||
query = fieldName + ":'" + file2.getName() + "\'";
|
||||
fileFound = isContentInSearchResults(query, file2.getName(), true);
|
||||
Assert.assertTrue(fileFound, "File Not found for query: " + query);
|
||||
|
||||
testSearchQuery(query, 2, SearchLanguage.AFTS);
|
||||
testSearchQuery(query, 1, SearchLanguage.AFTS);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -342,4 +347,30 @@ public class SearchAFTSInFieldTest extends AbstractE2EFunctionalTest
|
||||
fileFound = isContentInSearchResponse(response, file3.getName());
|
||||
Assert.assertFalse(fileFound, "File3 found for query: " + query);
|
||||
}
|
||||
|
||||
/** Check that a 501 error is returned when performing exact search on a tokenised field without cross-locale support. */
|
||||
@Test (priority = 12, groups = { CROSS_LOCALE_SUPPORT_DISABLED, ACS_701n })
|
||||
public void testExactMatchWithoutCrossLocale()
|
||||
{
|
||||
// Force the query to hit Solr rather than the DB.
|
||||
String query = "=cm:title:test and cm:name:*";
|
||||
RestRequestQueryModel queryModel = new RestRequestQueryModel();
|
||||
queryModel.setQuery(query);
|
||||
SearchRequest searchRequest = new SearchRequest(queryModel);
|
||||
restClient.authenticateUser(dataUser.getAdminUser()).withSearchAPI().search(searchRequest);
|
||||
restClient.assertStatusCodeIs(HttpStatus.NOT_IMPLEMENTED);
|
||||
}
|
||||
|
||||
/** Check that a 200 success is returned when performing exact search against the DB (even on a tokenised field without cross-locale support). */
|
||||
@Test (priority = 13)
|
||||
public void testExactMatchAgainstDB()
|
||||
{
|
||||
// Using a simple query we will hit the DB.
|
||||
String query = "=cm:title:test";
|
||||
RestRequestQueryModel queryModel = new RestRequestQueryModel();
|
||||
queryModel.setQuery(query);
|
||||
SearchRequest searchRequest = new SearchRequest(queryModel);
|
||||
restClient.authenticateUser(dataUser.getAdminUser()).withSearchAPI().search(searchRequest);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,187 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import static org.testng.Assert.assertEquals;
|
||||
import static org.testng.Assert.assertNull;
|
||||
import static org.testng.Assert.assertTrue;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.rest.search.RestRequestQueryModel;
|
||||
import org.alfresco.rest.search.SearchRequest;
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.test.search.functional.AbstractE2EFunctionalTest;
|
||||
import org.alfresco.utility.data.CustomObjectTypeProperties;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FileType;
|
||||
import org.alfresco.utility.model.FolderModel;
|
||||
import org.apache.chemistry.opencmis.client.api.Document;
|
||||
import org.apache.chemistry.opencmis.commons.PropertyIds;
|
||||
import org.apache.chemistry.opencmis.commons.enums.VersioningState;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import javax.validation.constraints.AssertTrue;
|
||||
|
||||
/**
|
||||
* Test class verifying that aspects are added to and removed from Solr documents
|
||||
* Created for Search-2379
|
||||
*/
|
||||
public class SearchAspectTest extends AbstractE2EFunctionalTest
|
||||
{
|
||||
private FolderModel folder;
|
||||
private FileModel file;
|
||||
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation()
|
||||
{
|
||||
folder = new FolderModel("folder-aspect");
|
||||
|
||||
file = new FileModel("file-aspect.txt");
|
||||
file.setContent("content file aspect");
|
||||
|
||||
dataContent.usingUser(testUser).usingSite(testSite).createCustomContent(folder, "cmis:folder",
|
||||
new CustomObjectTypeProperties());
|
||||
|
||||
dataContent.usingUser(testUser).usingResource(folder).createCustomContent(file, "cmis:document",
|
||||
new CustomObjectTypeProperties());
|
||||
|
||||
waitForMetadataIndexing(file.getName(), true);
|
||||
|
||||
assertTrue(deployCustomModel("model/finance-model.xml"),
|
||||
"failing while deploying model");
|
||||
}
|
||||
|
||||
@Test(priority = 1)
|
||||
public void testAspectIsRemoved() throws Exception
|
||||
{
|
||||
|
||||
// When checking out a file, cm:checkedOut aspect is added
|
||||
cmisApi.authenticateUser(testUser).usingResource(file).checkOut();
|
||||
|
||||
String queryFile = "cm:name:'" + file.getName() + "'";
|
||||
|
||||
RestRequestQueryModel queryModel = new RestRequestQueryModel();
|
||||
queryModel.setQuery(queryFile);
|
||||
queryModel.setLanguage(SearchLanguage.AFTS.toString());
|
||||
SearchRequest searchRequest = new SearchRequest();
|
||||
searchRequest.setQuery(queryModel);
|
||||
searchRequest.setInclude(List.of("aspectNames"));
|
||||
|
||||
SearchResponse response = restClient.authenticateUser(testUser).withSearchAPI().search(searchRequest);
|
||||
assertTrue(response.getEntries().get(0).getModel().getAspectNames().contains("cm:checkedOut"),
|
||||
"checkedOut aspect expected");
|
||||
|
||||
// When cancelling the check out of a file, cm:checkedOut aspect is removed
|
||||
cmisApi.authenticateUser(testUser).usingResource(file).cancelCheckOut();
|
||||
cmisApi.authenticateUser(testUser).usingResource(file).updateProperty(PropertyIds.NAME,
|
||||
"file-aspect-random.txt");
|
||||
|
||||
waitForMetadataIndexing("file-aspect-random.txt", true);
|
||||
|
||||
queryModel.setQuery("cm:name:'file-aspect-random.txt'");
|
||||
searchRequest.setQuery(queryModel);
|
||||
response = restClient.authenticateUser(testUser).withSearchAPI().search(searchRequest);
|
||||
|
||||
assertTrue(!response.getEntries().get(0).getModel().getAspectNames().contains("cm:checkedOut"),
|
||||
"checkedOut aspect was NOT expected");
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that when an aspect is removed, all the properties defined in the aspect are removed as well.
|
||||
* Created for Search-2538
|
||||
*/
|
||||
@Test(priority = 2)
|
||||
public void testAspectIsRemovedWithItsProperties()
|
||||
{
|
||||
String parkingLocationFieldName = "finance:ParkingLocation";
|
||||
String financeLocationFieldName = "finance:Location";
|
||||
|
||||
FileModel expenseLondon = FileModel.getRandomFileModel(FileType.TEXT_PLAIN, "Expense");
|
||||
|
||||
Map<String, Object> properties = new HashMap<>();
|
||||
properties.put(PropertyIds.OBJECT_TYPE_ID, "D:finance:Expense");
|
||||
properties.put(PropertyIds.NAME, expenseLondon.getName());
|
||||
properties.put(PropertyIds.SECONDARY_OBJECT_TYPE_IDS, List.of("P:finance:ParkEx"));
|
||||
properties.put(financeLocationFieldName, "LondonBridge");
|
||||
properties.put(parkingLocationFieldName, "LiverpoolStreet");
|
||||
|
||||
// Compose query
|
||||
String queryFile = "cm:name:'" + expenseLondon.getName() + "'";
|
||||
RestRequestQueryModel queryModel = new RestRequestQueryModel();
|
||||
queryModel.setQuery(queryFile);
|
||||
queryModel.setLanguage(SearchLanguage.AFTS.toString());
|
||||
SearchRequest searchRequest = new SearchRequest();
|
||||
searchRequest.setQuery(queryModel);
|
||||
searchRequest.setInclude(List.of("aspectNames","properties"));
|
||||
|
||||
cmisApi.authenticateUser(testUser)
|
||||
.usingSite(testSite)
|
||||
.usingResource(folder)
|
||||
.createFile(expenseLondon, properties, VersioningState.MAJOR)
|
||||
.assertThat().existsInRepo();
|
||||
|
||||
String parkingLocationQuery = parkingLocationFieldName + ":LiverpoolStreet";
|
||||
Assert.assertTrue(waitForIndexing(parkingLocationQuery, true));
|
||||
|
||||
// check that the document found has the expected properties and aspects defined.
|
||||
SearchResponse response = restClient.authenticateUser(testUser).withSearchAPI().search(searchRequest);
|
||||
assertTrue(response.getEntries().get(0).getModel().getAspectNames().contains("finance:ParkEx"),
|
||||
"parkEx aspect was expected");
|
||||
Map<String, String> foundProperties = (Map<String, String>) response.getEntries().get(0).getModel().getProperties();
|
||||
assertEquals("LondonBridge", foundProperties.get(financeLocationFieldName),
|
||||
"finance:Location property is expected to be defined with 'LondonBridge' as value");
|
||||
assertEquals("LiverpoolStreet", foundProperties.get(parkingLocationFieldName),
|
||||
"finance:ParkingLocation property is expected to be defined with 'LiverpoolStreet' as value");
|
||||
|
||||
// Remove the finance:ParkEx aspect by updating the CMIS secondary object type IDs without it
|
||||
Document doc = cmisApi.withCMISUtil().getCmisDocument(cmisApi.getLastResource());
|
||||
List<Object> aspects = doc.getProperty(PropertyIds.SECONDARY_OBJECT_TYPE_IDS).getValues();
|
||||
aspects.remove("P:finance:ParkEx");
|
||||
Map<String, Object> updateProperties = new HashMap<>();
|
||||
updateProperties.put(PropertyIds.SECONDARY_OBJECT_TYPE_IDS, aspects);
|
||||
|
||||
doc.updateProperties(updateProperties);
|
||||
|
||||
// Check that the field related to the removed aspect is no longer indexed for the tested file
|
||||
Assert.assertTrue(waitForIndexing(parkingLocationQuery, false));
|
||||
|
||||
response = restClient.authenticateUser(testUser).withSearchAPI().search(searchRequest);
|
||||
assertTrue(!response.getEntries().get(0).getModel().getAspectNames().contains("finance:ParkEx"),
|
||||
"parkEx aspect was NOT expected");
|
||||
|
||||
// check that the document found has finance:Location property defined and finance:ParkingLocation has been removed
|
||||
foundProperties = (Map<String, String>) response.getEntries().get(0).getModel().getProperties();
|
||||
assertEquals("LondonBridge", foundProperties.get(financeLocationFieldName),
|
||||
"finance:Location property is expected to be defined with 'LondonBridge' as value");
|
||||
assertNull(foundProperties.get(parkingLocationFieldName), "finance:ParkingLocation should not be included " +
|
||||
"into the document anymore");
|
||||
|
||||
}
|
||||
}
|
@@ -0,0 +1,259 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2023 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import org.alfresco.rest.search.FacetFieldBucket;
|
||||
import org.alfresco.rest.search.RestRequestFacetFieldModel;
|
||||
import org.alfresco.rest.search.RestRequestFacetFieldsModel;
|
||||
import org.alfresco.rest.search.RestRequestQueryModel;
|
||||
import org.alfresco.rest.search.RestResultBucketsModel;
|
||||
import org.alfresco.rest.search.SearchRequest;
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.utility.Utility;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FileType;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class SearchCasesTest extends AbstractSearchServicesE2ETest
|
||||
{
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
searchServicesDataPreparation();
|
||||
Assert.assertTrue(waitForContentIndexing(file4.getContent(), true));
|
||||
}
|
||||
|
||||
@Test(priority=1)
|
||||
public void testSearchNameField()
|
||||
{
|
||||
SearchResponse response = queryAsUser(testUser, "cm:name:pangram");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=2)
|
||||
public void testSearchTitleField()
|
||||
{
|
||||
SearchResponse response2 = queryAsUser(testUser, "cm:title:cars");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response2.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=3)
|
||||
public void testSearchDescriptionField()
|
||||
{
|
||||
SearchResponse response3 = queryAsUser(testUser, "cm:description:alfresco");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response3.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=4)
|
||||
public void testSearchTextFile()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "cm:name:pangram.txt");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=5)
|
||||
public void testSearchPDFFile()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "cm:name:cars.PDF");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=6)
|
||||
public void testSearchODTFile()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "cm:name:unique.ODT");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=7)
|
||||
public void testSearchPhraseQueries()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "The quick brown fox jumps over the lazy dog");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=8)
|
||||
public void testSearchExactTermQueries()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "=alfresco");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=9)
|
||||
public void testSearchConjunctionQueries()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "unique AND search");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=10)
|
||||
public void testSearchDisjunctionQueries()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "file OR discovery");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=11)
|
||||
public void testSearchNegationQueries()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "pangram NOT pan");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=12)
|
||||
public void testSearchWildcardQueries()
|
||||
{
|
||||
SearchResponse response6 = queryAsUser(testUser, "al?res*");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=13)
|
||||
public void testSearchUpdateContent() throws InterruptedException
|
||||
{
|
||||
String originalText = String.valueOf(System.currentTimeMillis());
|
||||
String newText = String.valueOf(System.currentTimeMillis() + 300000);
|
||||
|
||||
// Create test file to be accessed only by this test method to avoid inconsistent results when querying updates
|
||||
FileModel updateableFile = createFileWithProvidedText(originalText + ".txt", originalText);
|
||||
|
||||
// Verify that 1 occurrence of the original text is found
|
||||
SearchResponse response1 = queryAsUser(testUser, "cm:content:" + originalText);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response1.getEntries().size(), 1, "Expected 1 original text before update");
|
||||
|
||||
// Verify that 0 occurrences of the replacement text are found
|
||||
SearchResponse response2 = queryAsUser(testUser, "cm:content:" + newText);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response2.getEntries().size(), 0, "Expected 0 new text before update");
|
||||
|
||||
// Update the content
|
||||
String newContent = "Description: Contains provided string: " + newText;
|
||||
dataContent.usingUser(adminUserModel).usingSite(testSite).usingResource(updateableFile)
|
||||
.updateContent(newContent);
|
||||
Assert.assertTrue(waitForContentIndexing(newText, true));
|
||||
|
||||
// Verify that 0 occurrences of the original text are found
|
||||
SearchResponse response3 = queryAsUser(testUser, "cm:content:" + originalText);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response3.getEntries().size(), 0, "Expected 0 original text after update");
|
||||
|
||||
// Verify that 1 occurrence of the replacement text is found
|
||||
SearchResponse response4 = queryAsUser(testUser, "cm:content:" + newText);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response4.getEntries().size(), 1, "Expected 1 new text after update");
|
||||
}
|
||||
|
||||
/**
|
||||
* {
|
||||
* "query": {
|
||||
* "query": "*"
|
||||
* },
|
||||
* "facetFields": {
|
||||
* "facets": [{"field": "cm:mimetype"},{"field": "modifier"}]
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
@Test(priority=14)
|
||||
public void searchWithFacedFields() throws InterruptedException
|
||||
{
|
||||
String uniqueText = String.valueOf(System.currentTimeMillis());
|
||||
|
||||
// Create test file to be accessed only by this test method to avoid inconsistent results
|
||||
createFileWithProvidedText(uniqueText + ".ODT", uniqueText);
|
||||
|
||||
SearchRequest query = new SearchRequest();
|
||||
RestRequestQueryModel queryReq = new RestRequestQueryModel();
|
||||
queryReq.setQuery("cm:content:" + uniqueText);
|
||||
query.setQuery(queryReq);
|
||||
|
||||
RestRequestFacetFieldsModel facetFields = new RestRequestFacetFieldsModel();
|
||||
List<RestRequestFacetFieldModel> facets = new ArrayList<>();
|
||||
facets.add(new RestRequestFacetFieldModel("cm:mimetype"));
|
||||
facets.add(new RestRequestFacetFieldModel("modifier"));
|
||||
facetFields.setFacets(facets);
|
||||
query.setFacetFields(facetFields);
|
||||
|
||||
SearchResponse response = query(query);
|
||||
|
||||
Assert.assertNotNull(response.getContext().getFacetsFields());
|
||||
Assert.assertFalse(response.getContext().getFacetsFields().isEmpty());
|
||||
Assert.assertNull(response.getContext().getFacetQueries());
|
||||
Assert.assertNull(response.getContext().getFacets());
|
||||
|
||||
RestResultBucketsModel model = response.getContext().getFacetsFields().get(0);
|
||||
Assert.assertEquals(model.getLabel(), "modifier");
|
||||
|
||||
model.assertThat().field("label").is("modifier");
|
||||
FacetFieldBucket bucket1 = model.getBuckets().get(0);
|
||||
bucket1.assertThat().field("label").is(testUser.getUsername());
|
||||
bucket1.assertThat().field("display").is("FN-" + testUser.getUsername() + " LN-" + testUser.getUsername());
|
||||
bucket1.assertThat().field("filterQuery").is("modifier:\"" + testUser.getUsername() + "\"");
|
||||
bucket1.assertThat().field("count").is(1);
|
||||
}
|
||||
|
||||
@Test(priority=15)
|
||||
public void searchSpecialCharacters()
|
||||
{
|
||||
String specialCharfileName = "è¥äæ§ç§-åæ.pdf";
|
||||
FileModel file = new FileModel(specialCharfileName, "è¥äæ§ç§-忬¯¸" + "è¥äæ§ç§-忬¯¸", "è¥äæ§ç§-忬¯¸", FileType.TEXT_PLAIN,
|
||||
"Text file with Special Characters: " + specialCharfileName);
|
||||
dataContent.usingUser(testUser).usingSite(testSite).createContent(file);
|
||||
|
||||
waitForIndexing(file.getName(), true);
|
||||
|
||||
SearchRequest searchReq = createQuery("name:'" + specialCharfileName + "'");
|
||||
SearchResponse nodes = query(searchReq);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
|
||||
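// Poll until the file shows up in the results or SEARCH_MAX_ATTEMPTS is reached (the index is eventually consistent).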
int searchCount = 0;
|
||||
while (nodes.isEmpty() && searchCount < SEARCH_MAX_ATTEMPTS)
|
||||
{
|
||||
// Wait for the solr indexing (eventual consistency).
|
||||
Utility.waitToLoopTime(properties.getSolrWaitTimeInSeconds(), "Wait For Results After Indexing. Retry Attempt: " + (searchCount + 1));
|
||||
nodes = query(searchReq);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
}
|
||||
|
||||
nodes.assertThat().entriesListIsNotEmpty();
|
||||
restClient.onResponse().assertThat().body("list.entries.entry[0].name", Matchers.equalToIgnoringCase(specialCharfileName));
|
||||
}
|
||||
}
|
@@ -0,0 +1,345 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import org.alfresco.test.search.functional.AbstractSearchExactTermTest;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
/**
|
||||
* Tests covering all the tokenization modes (false, true, both) with exact term queries.
|
||||
* Since Search Services is not configured with Cross Locale enabled, some errors and omissions are expected.
|
||||
* These tests are based on the AFTSDefaultTextQueryIT class, but an additional type of property
|
||||
* has been added (tok:true) in order to provide full coverage for the available options.
|
||||
*
|
||||
* Since tok:true and tok:both properties are not supported for use with exact term search,
|
||||
* an exception is expected from these queries.
|
||||
*
|
||||
* The SOLR log dumps the cause of the error, for instance:
|
||||
* java.lang.UnsupportedOperationException: Exact Term search is not supported unless you configure the field
|
||||
* <{http://www.alfresco.org/model/tokenised/1.0}true> for cross locale search
|
||||
*
|
||||
* Note that tests not specifying a search field in the query (for instance, =run) use
|
||||
* the following properties by default: cm:name, cm:title, cm:description, cm:content.
|
||||
* Since cm:name is the only one declared as Cross Locale by default in shared.properties,
|
||||
* these kinds of queries are executed only against the cm:name property.
|
||||
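*
* For reference, a minimal sketch of the shared.properties entries that enable cross locale support
* for a property (the exact entries below are illustrative assumptions, not taken from this change set):
*
*   alfresco.cross.locale.datatype.0={http://www.alfresco.org/model/dictionary/1.0}text
*   alfresco.cross.locale.property.0={http://www.alfresco.org/model/content/1.0}name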
*/
|
||||
public class SearchExactTermTest extends AbstractSearchExactTermTest
|
||||
{
|
||||
|
||||
@Test
|
||||
public void exactSearch_singleTerm_shouldReturnResultsContainingExactTermInName() throws Exception
|
||||
{
|
||||
/*
|
||||
* 1 result is expected:
|
||||
* - Document #2 >> name: "Run"
|
||||
*/
|
||||
assertResponseCardinality("=run", 1);
|
||||
|
||||
/*
|
||||
* No result for runner in the cm:name property; one record has runners in the "description" property.
|
||||
* This shows the difference between exact and non-exact search.
|
||||
*/
|
||||
assertResponseCardinality("=runner", 0);
|
||||
assertResponseCardinality("runner", 1);
|
||||
|
||||
/*
|
||||
* 1 result is expected:
|
||||
* - Document #4 >> name: "Jump"
|
||||
*/
|
||||
assertResponseCardinality("=jump", 1);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_singleTermConjunction_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
/**
|
||||
* Since REST API is getting the results from DB or Search Services, using single term expressions is always
|
||||
* retrieved from DB when using default configuration "solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE".
|
||||
* Combining this single term with range queries (like cm:created) will ensure the results
|
||||
* are coming from SOLR in this mode.
|
||||
*/
|
||||
|
||||
/*
|
||||
* 1 result is expected for non-tokenised field (tok:false)
|
||||
* - Document #4 >> title: "Running"
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:Running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
/*
|
||||
* 0 results are expected: there is no result that has exactly cm:title:"Run"
|
||||
* The closest we have is record Run (tok:false:"Run : a philosophy")
|
||||
* As you can see we don't have a full match, so it's not in the results.
|
||||
*
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:Run AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 0);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2953
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_singleTermConjunction_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2953
|
||||
assertResponseCardinality("=tok:false:running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:running", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:Running", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:Run", 0);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void exactSearch_singleTermConjunction_shouldReturnException() throws Exception
|
||||
{
|
||||
|
||||
/**
|
||||
* Since REST API is getting the results from DB or Search Services, using single term expressions is always
|
||||
* retrieved from DB when using default configuration "solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE".
|
||||
* Combining this single term with range queries (like cm:created) will ensure the results
|
||||
* are coming from SOLR in this mode.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Unsupported Exception is expected when using exact term search with tokenised properties
|
||||
*/
|
||||
assertException("=tok:true:running AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
assertException("=tok:both:running AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
|
||||
/**
|
||||
* Unsupported Exception is expected when using exact term search with tokenised properties
|
||||
*/
|
||||
assertException("=tok:true:Running AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
assertException("=tok:both:Running AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
|
||||
/**
|
||||
* Unsupported Exception is expected when using exact term search with tokenised properties
|
||||
*/
|
||||
assertException("=tok:true:Run AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
assertException("=tok:both:Run AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_singleTermConjunction_shouldReturnException() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2461
|
||||
assertException("=tok:true:running");
|
||||
assertException("=tok:both:running");
|
||||
|
||||
// SEARCH-2461
|
||||
assertException("=tok:true:Running");
|
||||
assertException("=tok:both:Running");
|
||||
|
||||
// SEARCH-2461
|
||||
assertException("=tok:true:Run");
|
||||
assertException("=tok:both:Run");
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_multiTerm_shouldReturnResultsContainingExactTerm() throws Exception
|
||||
{
|
||||
/*
|
||||
* 2 results are expected:
|
||||
* - Document #2 >> name: "Run"
|
||||
* - Document #4 >> name: "Jump"
|
||||
*/
|
||||
assertResponseCardinality("=run =jump", 2);
|
||||
|
||||
/*
|
||||
* No result for runner or jumper in cm:name property
|
||||
* One document has runners and another record has jumpers in description
|
||||
* This shows the difference between exact and non-exact search.
|
||||
*/
|
||||
assertResponseCardinality("=runner =jumper", 0);
|
||||
assertResponseCardinality("runner jumper", 2);
|
||||
|
||||
/*
|
||||
* 2 results are expected:
|
||||
* - Document #1 >> name: "Running"
|
||||
* - Document #5 >> name: "Running jumping"
|
||||
*/
|
||||
assertResponseCardinality("=running =jumping", 2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_multiTermInFieldWithOnlyUnTokenizedAnalysis_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
/*
|
||||
* 1 result is expected
|
||||
* - Document #4 >> title: "Running"
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:Running =tok:false:jumpers AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
/**
|
||||
* Unsupported Exception is expected when using exact term search with tokenised properties
|
||||
*/
|
||||
assertException("=tok:both:running =tok:both:jumpers AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
assertException("=tok:true:running =tok:true:jumpers AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2953
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_multiTermInFieldWithOnlyUnTokenizedAnalysis_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
// SEARCH-2953
|
||||
assertResponseCardinality("=tok:false:running =tok:false:jumpers AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:running =tok:false:jumpers", 1);
|
||||
assertException("=tok:both:running =tok:both:jumpers");
|
||||
assertException("=tok:true:running =tok:true:jumpers");
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_exactPhrase_shouldReturnResultsContainingExactPhrase() throws Exception
|
||||
{
|
||||
/*
|
||||
* No result for "run jump" in cm:name property
|
||||
*/
|
||||
assertResponseCardinality("=\"run jump\"", 0);
|
||||
|
||||
/*
|
||||
* No result for "runner jumper" in cm:name property
|
||||
* One document has "runners jumpers" in its description
|
||||
* This shows the difference between exact and non-exact search.
|
||||
*/
|
||||
assertResponseCardinality("=\"runner jumper\"", 0);
|
||||
assertResponseCardinality("\"runner jumper\"", 1);
|
||||
|
||||
/*
|
||||
* 1 result is expected for exact term search
|
||||
* - Document #5 >> name: "Running jumping"
|
||||
*/
|
||||
assertResponseCardinality("=\"running jumping\"", 1);
|
||||
|
||||
/*
|
||||
* 5 results are expected for not exact term search:
|
||||
* - Document #1 >> name: "Running", description: "Running is a sport is a nice activity", content: "when you are running you are doing an amazing sport", title: "Running jumping"
|
||||
* - Document #2 >> name: "Run", description: "you are supposed to run jump", content: "after many runs you are tired and if you jump it happens the same", title: "Run : a philosophy"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
* - Document #4 >> content: "runnings jumpings", title: "Running"
|
||||
* - Document #5 >> name: "Running jumping", title: "Running the art of jumping"
|
||||
*/
|
||||
assertResponseCardinality("\"running jumping\"", 5);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_phraseInFieldConjunction_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
/*
|
||||
* 1 result is expected for exact term search
|
||||
* - Document #5 >> name: "Running jumping"
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:\"Running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
/*
|
||||
* No result for "Running jumping twice" in cm:name property is expected
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:\"Running jumping twice\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 0);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2953
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_phraseInFieldConjunction_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2953
|
||||
assertResponseCardinality("=tok:false:\"running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:\"running jumping\"", 1);
|
||||
assertResponseCardinality("=tok:false:\"Running jumping\"", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:\"Running jumping twice\"", 0);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_phraseInFieldConjunction_shouldReturnOrException() throws Exception
|
||||
{
|
||||
/**
|
||||
* Unsupported Exception is expected when using exact term search with tokenised properties
|
||||
*/
|
||||
assertException("=tok:true:\"running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
assertException("=tok:both:\"running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
|
||||
/**
|
||||
* Unsupported Exception is expected when using exact term search with tokenised properties
|
||||
*/
|
||||
assertException("=tok:true:\"Running jumping twice\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
assertException("=tok:both:\"Running jumping twice\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']");
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_phraseInFieldConjunction_shouldReturnException() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2461
|
||||
assertException("=tok:true:\"Running jumping\"");
|
||||
assertException("=tok:both:\"Running jumping\"");
|
||||
|
||||
// SEARCH-2461
|
||||
assertException("=tok:true:\"Running jumping twice\"");
|
||||
assertException("=tok:both:\"Running jumping twice\"");
|
||||
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,96 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.test.search.functional.AbstractE2EFunctionalTest;
|
||||
import org.alfresco.utility.data.DataGroup;
|
||||
import org.alfresco.utility.model.GroupModel;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
/**
|
||||
* Test class verifying that the PARENT field includes all of a node's parent nodes
|
||||
* Created for SEARCH-2378
|
||||
*/
|
||||
public class SearchParentField extends AbstractE2EFunctionalTest
|
||||
{
|
||||
@Autowired
|
||||
protected DataGroup dataGroup;
|
||||
|
||||
List<GroupModel> groups;
|
||||
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation()
|
||||
{
|
||||
|
||||
groups = new ArrayList<>();
|
||||
groups.add(dataGroup.createRandomGroup());
|
||||
groups.add(dataGroup.createRandomGroup());
|
||||
|
||||
dataGroup.addListOfUsersToGroup(groups.get(0), testUser);
|
||||
dataGroup.addListOfUsersToGroup(groups.get(1), testUser);
|
||||
|
||||
waitForIndexing(
|
||||
"TYPE:'cm:authorityContainer' AND cm:authorityName:'GROUP_" + groups.get(1).getGroupIdentifier() + "'",
|
||||
true);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Test users in groups can be found using PARENT expressions.
|
||||
*/
|
||||
@Test(priority = 1)
|
||||
public void testSearchParentForPerson() throws Exception
|
||||
{
|
||||
|
||||
for (GroupModel group : groups)
|
||||
{
|
||||
|
||||
// Find groupId to be used in the PARENT expression
|
||||
String queryGroup = "TYPE:'cm:authorityContainer' AND cm:authorityName:'GROUP_" + group.getGroupIdentifier()
|
||||
+ "'";
|
||||
SearchResponse response = queryAsUser(dataUser.getAdminUser(), queryGroup);
|
||||
String groupId = response.getEntries().get(0).getModel().getId();
|
||||
|
||||
// Find the user assigned as a member of this group using a PARENT clause
|
||||
String queryParentGroup = "(TYPE:'cm:person' OR TYPE:'cm:authorityContainer') AND PARENT:'workspace://SpacesStore/"
|
||||
+ groupId + "'";
|
||||
|
||||
response = queryAsUser(dataUser.getAdminUser(), queryParentGroup);
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
Assert.assertEquals(response.getPagination().getCount(), 1, "Expecting 1 user (" + testUser.getUsername()
|
||||
+ ") as member of this group (" + group.getGroupIdentifier() + ")");
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
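For readability, the group lookup and the PARENT query inside testSearchParentForPerson() could be folded into a single helper. A hypothetical sketch (not part of the committed class), built only from calls already present above:

    // Resolve a group's node id and build the PARENT query used in the test above.
    private String buildParentQueryForGroup(GroupModel group)
    {
        String groupQuery = "TYPE:'cm:authorityContainer' AND cm:authorityName:'GROUP_"
                + group.getGroupIdentifier() + "'";
        SearchResponse response = queryAsUser(dataUser.getAdminUser(), groupQuery);
        String groupId = response.getEntries().get(0).getModel().getId();

        return "(TYPE:'cm:person' OR TYPE:'cm:authorityContainer') AND PARENT:'workspace://SpacesStore/"
                + groupId + "'";
    }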
@@ -65,7 +65,7 @@ public class SearchQueryPaginationTest extends AbstractCmisE2ETest
|
||||
|
||||
waitForMetadataIndexing(testFile.getName(), true);
|
||||
}
|
||||
|
||||
|
||||
@Test(priority = 1, groups = { TestGroup.ACS_62n})
|
||||
public void testCmisSearchWithPagination()
|
||||
{
|
||||
@@ -208,7 +208,8 @@ public class SearchQueryPaginationTest extends AbstractCmisE2ETest
|
||||
Assert.assertFalse(response.getPagination().isHasMoreItems(), "Incorrect: hasMoreItems");
|
||||
}
|
||||
|
||||
@Test(priority = 4)
|
||||
@Test(priority = 4, enabled = false)
|
||||
// https://issues.alfresco.com/jira/browse/SEARCH-2541
|
||||
public void testSearchApiPagination()
|
||||
{
|
||||
// Search for the files with specific title
|
||||
|
@@ -0,0 +1,54 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2023 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
public class SearchSimpleCasesTest extends AbstractSearchServicesE2ETest
|
||||
{
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
searchServicesDataPreparation();
|
||||
waitForContentIndexing(file4.getContent(), true);
|
||||
}
|
||||
|
||||
@Test(priority=1)
|
||||
public void testSearchContentField()
|
||||
{
|
||||
SearchResponse response4 = queryUntilResponseEntriesListNotEmpty(testUser, "cm:content:unique");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response4.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
|
||||
@Test(priority=2)
|
||||
public void testSearchDocxFile()
|
||||
{
|
||||
SearchResponse response6 = queryUntilResponseEntriesListNotEmpty(testUser, "cm:name:alfresco.docx");
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
response6.assertThat().entriesListIsNotEmpty();
|
||||
}
|
||||
}
|
@@ -115,9 +115,9 @@ public class SearchTest extends AbstractSearchServicesE2ETest
|
||||
public void searchWithOneSortClause()
|
||||
{
|
||||
// Tests the ascending order first
|
||||
List<String> expectedOrder = asList("alfresco.txt", "cars.txt", "pangram.txt");
|
||||
List<String> expectedOrder = asList("alfresco.docx", "cars.PDF", "pangram.txt");
|
||||
|
||||
SearchRequest searchRequest = createQuery("cm_name:alfresco\\.txt cm_name:cars\\.txt cm_name:pangram\\.txt");
|
||||
SearchRequest searchRequest = createQuery("cm_name:alfresco\\.docx cm_name:cars\\.PDF cm_name:pangram\\.txt");
|
||||
searchRequest.addSortClause("FIELD", "name", true);
|
||||
|
||||
RestRequestFilterQueryModel filters = new RestRequestFilterQueryModel();
|
||||
@@ -162,9 +162,9 @@ public class SearchTest extends AbstractSearchServicesE2ETest
|
||||
public void searchWithTwoSortClauses()
|
||||
{
|
||||
// Tests the ascending order first
|
||||
List<String> expectedOrder = asList("alfresco.txt", "cars.txt", "pangram.txt");
|
||||
List<String> expectedOrder = asList("alfresco.docx", "cars.PDF", "pangram.txt");
|
||||
|
||||
SearchRequest searchRequest = createQuery("cm_name:alfresco\\.txt cm_name:cars\\.txt cm_name:pangram\\.txt");
|
||||
SearchRequest searchRequest = createQuery("cm_name:alfresco\\.docx cm_name:cars\\.PDF cm_name:pangram\\.txt");
|
||||
searchRequest.addSortClause("FIELD", "name", true);
|
||||
searchRequest.addSortClause("FIELD", "createdByUser.id", true);
|
||||
|
||||
|
@@ -0,0 +1,386 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2021 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.test.search.functional.searchServices.search.crosslocale;
|
||||
|
||||
import org.alfresco.test.search.functional.AbstractSearchExactTermTest;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
|
||||
/**
|
||||
* Tests covering all the tokenization modes (false, true, both) in combination with exact term queries.
|
||||
* Search Services must be configured with Cross Locale enabled in order to run these tests.
|
||||
* These tests are based on the AFTSDefaultTextQueryIT class, but an additional type of property
|
* has been added (tok:true) in order to provide full coverage of the available options.
|
||||
*/
|
||||
public class SearchExactTermCrossLocaleTest extends AbstractSearchExactTermTest
|
||||
{
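    // Assumption, for readers of these tests: tok:false, tok:true and tok:both are custom model
    // properties whose <tokenised> index setting is false, true and both respectively, so exact
    // term queries (=tok:...) behave differently on each of them. The actual content model is
    // deployed by the test data preparation and is not part of this diff.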
|
||||
|
||||
/**
|
||||
* Note these tests are searching in cm:name, cm:title, cm:description and cm:content properties
|
||||
*/
|
||||
@Test
|
||||
public void exactSearch_singleTerm_shouldReturnResultsContainingExactTerm() throws Exception
|
||||
{
|
||||
/*
|
||||
* 2 results are expected:
|
||||
* - Document #2 >> name: "Run", description: "you are supposed to run jump", title: "Run : a philosophy"
|
||||
* - Document #5 >> content: "run is Good as jump"
|
||||
*/
|
||||
assertResponseCardinality("=run", 2);
|
||||
|
||||
/*
|
||||
* No result is expected for "runner": Document #5 has "runners" in its description,
|
||||
* which shows the difference between an exact term search and a standard one
|
||||
*/
|
||||
assertResponseCardinality("=runner", 0);
|
||||
assertResponseCardinality("runner", 1);
|
||||
|
||||
/*
|
||||
* 3 results are expected:
|
||||
* - Document #2 >> description: "you are supposed to run jump", content: "after many runs you are tired and if you jump it happens the same"
|
||||
* - Document #4 >> name: "Jump"
|
||||
* - Document #5 >> content: "run is Good as jump"
|
||||
*/
|
||||
assertResponseCardinality("=jump", 3);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_singleTermConjunction_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
/**
|
||||
* Since the REST API can serve results either from the DB or from Search Services, a single-term expression is always
|
||||
* resolved against the DB under the default configuration "solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE".
|
||||
* Combining the single term with a range query (such as cm:created) ensures the results
|
||||
* come from SOLR in this mode.
|
||||
*/
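        // Side note (assumption): the repository property solr.query.fts.queryConsistency can also be
        // set to EVENTUAL to route every query to SOLR; the cm:created range used below keeps these
        // tests independent of that repository configuration.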
|
||||
|
||||
/*
|
||||
* 1 result is expected for non-tokenised field (tok:false)
|
||||
* - Document #4 >> title: "Running"
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:Running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
/*
|
||||
* 0 results are expected for non-tokenised field (tok:false), as there is no title: "Run"
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:Run AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 0);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2953
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_singleTerm_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2953
|
||||
assertResponseCardinality("=tok:false:running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:running", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:Running", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:Run", 0);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_singleTermConjunction_shouldReturnPartialFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
/**
|
||||
* Since the REST API can serve results either from the DB or from Search Services, a single-term expression is always
|
||||
* resolved against the DB under the default configuration "solr.query.fts.queryConsistency=TRANSACTIONAL_IF_POSSIBLE".
|
||||
* Combining the single term with a range query (such as cm:created) ensures the results
|
||||
* come from SOLR in this mode.
|
||||
*/
|
||||
|
||||
/*
|
||||
* 4 results are expected for tokenised fields (tok:true, tok:both)
|
||||
* - Document #1 >> title: "Running jumping"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
* - Document #4 >> title: "Running"
|
||||
* - Document #5 >> title: "Running the art of jumping"
|
||||
*/
|
||||
assertResponseCardinality("=tok:true:running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 4);
|
||||
assertResponseCardinality("=tok:both:running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 4);
|
||||
|
||||
/**
|
||||
* 4 results are expected for tokenised fields (tok:true, tok:both)
|
||||
* - Document #1 >> title: "Running jumping"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
* - Document #4 >> title: "Running"
|
||||
* - Document #5 >> title: "Running the art of jumping"
|
||||
*/
|
||||
assertResponseCardinality("=tok:true:Running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 4);
|
||||
assertResponseCardinality("=tok:both:Running AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 4);
|
||||
|
||||
/**
|
||||
* 1 result is expected for tokenised fields (tok:true, tok:both)
|
||||
* - Document #2 >> title: "Run : a philosophy"
|
||||
*/
|
||||
assertResponseCardinality("=tok:true:Run AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
assertResponseCardinality("=tok:both:Run AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_singleTerm_shouldReturnPartialFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:true:running", 4);
|
||||
assertResponseCardinality("=tok:both:running", 4);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:true:Running", 4);
|
||||
assertResponseCardinality("=tok:both:Running", 4);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:true:Run", 1);
|
||||
assertResponseCardinality("=tok:both:Run", 1);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Note these tests are searching in cm:name, cm:title, cm:description and cm:content properties
|
||||
*/
|
||||
@Test
|
||||
public void exactSearch_multiTerm_shouldReturnResultsContainingExactTerm() throws Exception
|
||||
{
|
||||
/*
|
||||
* 3 results are expected:
|
||||
* - Document #2 >> name: "Run", description: "you are supposed to run jump", title: "Run : a philosophy", content: "after many runs you are tired and if you jump it happens the same"
|
||||
* - Document #4 >> name: "Jump"
|
||||
* - Document #5 >> content: "run is Good as jump"
|
||||
*/
|
||||
assertResponseCardinality("=run =jump", 3);
|
||||
|
||||
/*
|
||||
* No result is expected for "runner" or "jumper":
|
||||
* Document #3 has "jumpers" in its description and title,
|
||||
* Document #5 has "runners" and "jumpers" in its description.
|
||||
* This shows the difference between an exact term search and a standard one
|
||||
*/
|
||||
assertResponseCardinality("=runner =jumper", 0);
|
||||
assertResponseCardinality("runner jumper", 2);
|
||||
|
||||
/*
|
||||
* 5 results are expected:
|
||||
* - Document #1 >> name: "Running", description: "Running is a sport is a nice activity", content: "when you are running you are doing an amazing sport", title: "Running jumping"
|
||||
* - Document #2 >> name: "Run", description: "you are supposed to run jump", content: "after many runs you are tired and if you jump it happens the same", title: "Run : a philosophy"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
* - Document #4 >> content: "runnings jumpings", title: "Running"
|
||||
* - Document #5 >> name: "Running jumping", title: "Running the art of jumping"
|
||||
*/
|
||||
assertResponseCardinality("=running =jumping", 5);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_multiTermInField_shouldReturnPartialFieldValueMatch() throws Exception
|
||||
{
|
||||
/**
|
||||
* 4 results are expected for tokenised fields (tok:true, tok:both)
|
||||
* - Document #1 >> title: "Running jumping"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
* - Document #4 >> title: "Running"
|
||||
* - Document #5 >> title: "Running the art of jumping"
|
||||
*/
|
||||
assertResponseCardinality("=tok:both:running =tok:both:jumpers AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 4);
|
||||
assertResponseCardinality("=tok:true:running =tok:true:jumpers AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 4);
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_multiTermInField_shouldReturnPartialFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:both:running =tok:both:jumpers", 4);
|
||||
assertResponseCardinality("=tok:true:running =tok:true:jumpers", 4);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2953
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_multiTermInField_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2953
|
||||
assertResponseCardinality("=tok:false:running =tok:false:jumpers AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:running =tok:false:jumpers", 0);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Note these tests are searching in cm:name, cm:title, cm:description and cm:content properties
|
||||
*/
|
||||
@Test
|
||||
public void exactSearch_exactPhrase_shouldReturnResultsContainingExactPhrase() throws Exception
|
||||
{
|
||||
/*
|
||||
* 1 result is expected:
|
||||
* - Document #2 >> description: "you are supposed to run jump"
|
||||
*/
|
||||
assertResponseCardinality("=\"run jump\"", 1);
|
||||
|
||||
/*
|
||||
* No result for "runner jumper" using exact term search
|
||||
* Document #5 has "runners" and "jumpers" in description, so it should be a result for not exact term search
|
||||
* You can see the difference between exact search and not
|
||||
*/
|
||||
assertResponseCardinality("=\"runner jumper\"", 0);
|
||||
assertResponseCardinality("\"runner jumper\"", 1);
|
||||
|
||||
/*
|
||||
* 3 results are expected for exact term search:
|
||||
* - Document #1 >> title: "Running jumping"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
* - Document #5 >> name: "Running jumping"
|
||||
*
|
||||
* When not using exact term search, 4 results would normally be expected.
|
||||
* Since the 'Milestone' wiki page (part of the out-of-the-box content) includes "running" in its content,
|
||||
* we check for 5 results instead of 4.
|
||||
*
|
||||
* This shows the difference between an exact term search and a standard one
|
||||
*/
|
||||
assertResponseCardinality("=\"running jumping\"", 3);
|
||||
assertResponseCardinality("\"running jumping\"", 5);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_phraseInFieldConjunction_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
/**
|
||||
* 1 result is expected for non-tokenised field (tok:false)
|
||||
* - Document #1 >> title: "Running jumping"
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:\"Running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
/**
|
||||
* No result is expected for non-tokenised field (tok:false), as there is no title: "Running jumping twice"
|
||||
*/
|
||||
assertResponseCardinality("=tok:false:\"Running jumping twice\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 0);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2953
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_phraseInFieldConjunction_shouldReturnFullFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2953
|
||||
assertResponseCardinality("=tok:false:\"running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:\"running jumping\"", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:\"Running jumping\"", 1);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:false:\"Running jumping twice\"", 0);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactSearch_phraseInFieldConjunction_shouldReturnPartialFieldValueMatch() throws Exception
|
||||
{
|
||||
/**
|
||||
* 2 results are expected for tokenised fields (tok:true, tok:both)
|
||||
* - Document #1 >> title: "Running jumping"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
*/
|
||||
assertResponseCardinality("=tok:true:\"running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 2);
|
||||
assertResponseCardinality("=tok:both:\"running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 2);
|
||||
|
||||
/**
|
||||
* 2 results are expected for tokenised fields (tok:true, tok:both)
|
||||
* - Document #1 >> title: "Running jumping"
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
*/
|
||||
assertResponseCardinality("=tok:true:\"Running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 2);
|
||||
assertResponseCardinality("=tok:both:\"Running jumping\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 2);
|
||||
|
||||
/**
|
||||
* 1 result is expected for tokenised fields (tok:true, tok:both)
|
||||
* - Document #3 >> title: "Running jumping twice jumpers"
|
||||
*/
|
||||
assertResponseCardinality("=tok:true:\"Running jumping twice\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
assertResponseCardinality("=tok:both:\"Running jumping twice\" AND cm:created:['" + fromDate + "' TO '" + toDate + "']", 1);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* These tests should be re-enabled once the following tickets have been solved:
|
||||
* - https://alfresco.atlassian.net/browse/SEARCH-2461
|
||||
*/
|
||||
@Test(enabled=false)
|
||||
public void failing_exactSearch_phraseInFieldConjunction_shouldReturnPartialFieldValueMatch() throws Exception
|
||||
{
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:true:\"running jumping\"", 2);
|
||||
assertResponseCardinality("=tok:both:\"running jumping\"", 2);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:true:\"Running jumping\"", 2);
|
||||
assertResponseCardinality("=tok:both:\"Running jumping\"", 2);
|
||||
|
||||
// SEARCH-2461
|
||||
assertResponseCardinality("=tok:true:\"Running jumping twice\"", 1);
|
||||
assertResponseCardinality("=tok:both:\"Running jumping twice\"", 1);
|
||||
|
||||
}
|
||||
|
||||
}
|
@@ -24,7 +24,7 @@
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.test.search.functional.searchServices.search;
|
||||
package org.alfresco.test.search.functional.searchServices.search.fingerprint;
|
||||
|
||||
import static jersey.repackaged.com.google.common.collect.Sets.newHashSet;
|
||||
import static org.testng.Assert.assertTrue;
|
||||
@@ -161,7 +161,7 @@ public class FingerPrintTest extends AbstractE2EFunctionalTest
|
||||
SearchResponse response = query(fingerprintQuery);
|
||||
|
||||
int count = response.getEntries().size();
|
||||
assertTrue(count > 1);
|
||||
assertTrue(count >= 1);
|
||||
|
||||
Set<String> expectedNames = newHashSet();
|
||||
expectedNames.add(fileTaco.getName());
|
||||
@@ -190,7 +190,7 @@ public class FingerPrintTest extends AbstractE2EFunctionalTest
|
||||
testSearchQueryUnordered(fingerprintQuery, expectedNames, SearchLanguage.AFTS);
|
||||
}
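    // For context (assumption, based on the Search Services document fingerprint feature): the
    // fingerprintQuery values used above are AFTS queries of the form FINGERPRINT:<nodeId>, optionally
    // FINGERPRINT:<nodeId>_<percent> to require a minimum similarity with the source node's content.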
|
||||
|
||||
@Test(priority = 5, enabled = false)
|
||||
@Test(priority = 5)
|
||||
@Bug(id = "SEARCH-2065")
|
||||
public void searchAfterVersionUpdate()
|
||||
{
|
||||
@@ -215,13 +215,13 @@ public class FingerPrintTest extends AbstractE2EFunctionalTest
|
||||
Assert.assertTrue(found, "Update File Not found in results for Fingerprint Query with updated content");
|
||||
}
|
||||
|
||||
@Test(priority = 6, enabled = false)
|
||||
@Test(priority = 6)
|
||||
@Bug(id = "SEARCH-2065")
|
||||
public void searchAfterVersionRevert() throws Exception
|
||||
{
|
||||
// Revert fileToBeUpdated to previous version
|
||||
restClient.authenticateUser(testUser).withCoreAPI().usingNode(fileToBeUpdated).revertVersion("1.0", "{}");
|
||||
String revertedContent = restClient.authenticateUser(testUser).withCoreAPI().usingNode(fileToBeUpdated).getVersionContent("1.2").toString();
|
||||
String revertedContent = restClient.authenticateUser(testUser).withCoreAPI().usingNode(fileToBeUpdated).getVersionContent("1.2").getResponse().asString();
|
||||
Assert.assertEquals(revertedContent, fileOriginal.getContent(), "Reverted content does not match Original");
|
||||
|
||||
// Wait for the new version of the file to be indexed
|
||||
@@ -236,7 +236,7 @@ public class FingerPrintTest extends AbstractE2EFunctionalTest
|
||||
Assert.assertTrue(notFound, "File appears in the results for Fingerprint Query even after reverting content changes");
|
||||
}
|
||||
|
||||
@Test(priority = 7, enabled = false)
|
||||
@Test(priority = 7)
|
||||
@Bug(id = "SEARCH-2065")
|
||||
public void searchAfterVersionDelete() throws Exception
|
||||
{
|
@@ -45,12 +45,12 @@ import org.testng.annotations.Test;
|
||||
public class SearchSolrAPITest extends AbstractE2EFunctionalTest
|
||||
{
|
||||
@Test(priority = 1)
|
||||
public void testGetSolrConfig() throws Exception
|
||||
public void testGetSolrConfig()
|
||||
{
|
||||
RestTextResponse response = restClient.authenticateUser(adminUserModel).withSolrAPI().getConfig();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
|
||||
restClient.onResponse().assertThat().content(Matchers.containsString("config"));
|
||||
restClient.onResponse().assertThat().body(Matchers.containsString("config"));
|
||||
Assert.assertNotNull(response.getJsonValueByPath("config.requestHandler"));
|
||||
Assert.assertNotNull(response.getJsonObjectByPath("config.requestHandler"));
|
||||
|
||||
@@ -65,7 +65,7 @@ public class SearchSolrAPITest extends AbstractE2EFunctionalTest
|
||||
}
|
||||
|
||||
@Test(priority = 2)
|
||||
public void testEditSolrConfig() throws Exception
|
||||
public void testEditSolrConfig()
|
||||
{
|
||||
String expectedError = "solrconfig editing is not enabled due to disable.configEdit";
|
||||
|
||||
@@ -85,7 +85,7 @@ public class SearchSolrAPITest extends AbstractE2EFunctionalTest
|
||||
restClient.authenticateUser(adminUserModel).withSolrAPI().postConfig(postBody);
|
||||
restClient.assertStatusCodeIs(HttpStatus.FORBIDDEN);
|
||||
|
||||
restClient.onResponse().assertThat().content(Matchers.containsString(expectedError));
|
||||
restClient.onResponse().assertThat().body(Matchers.containsString(expectedError));
|
||||
|
||||
// TODO: Following asserts fail with error:
|
||||
/*
|
||||
@@ -97,21 +97,21 @@ public class SearchSolrAPITest extends AbstractE2EFunctionalTest
|
||||
}
|
||||
|
||||
@Test(priority = 3)
|
||||
public void testGetSolrConfigOverlay() throws Exception
|
||||
public void testGetSolrConfigOverlay()
|
||||
{
|
||||
restClient.authenticateUser(adminUserModel).withSolrAPI().getConfigOverlay();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
|
||||
restClient.onResponse().assertThat().content(Matchers.containsString("overlay"));
|
||||
restClient.onResponse().assertThat().body(Matchers.containsString("overlay"));
|
||||
}
|
||||
|
||||
@Test(priority = 4)
|
||||
public void testGetSolrConfigParams() throws Exception
|
||||
public void testGetSolrConfigParams()
|
||||
{
|
||||
restClient.authenticateUser(adminUserModel).withSolrAPI().getConfigParams();
|
||||
restClient.assertStatusCodeIs(HttpStatus.OK);
|
||||
|
||||
restClient.onResponse().assertThat().content(Matchers.containsString("response"));
|
||||
restClient.onResponse().assertThat().body(Matchers.containsString("response"));
|
||||
}
|
||||
|
||||
@Test(priority = 5)
|
||||
@@ -127,4 +127,4 @@ public class SearchSolrAPITest extends AbstractE2EFunctionalTest
|
||||
String errorMsg = "No QueryObjectBuilder defined for node a in {q={!xmlparser";
|
||||
Assert.assertTrue(restClient.onResponse().getResponse().body().xmlPath().getString("response").contains(errorMsg));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.test.search.functional.searchServices.solr.admin;
|
||||
|
||||
import static org.testng.Assert.assertEquals;
|
||||
import static org.testng.Assert.assertNotEquals;
|
||||
import static org.testng.Assert.assertTrue;
|
||||
|
||||
import java.util.Random;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
import org.alfresco.rest.core.RestResponse;
|
||||
import org.alfresco.test.search.functional.AbstractE2EFunctionalTest;
|
||||
import org.alfresco.utility.Utility;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FileType;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
/** E2E tests for the SUMMARY admin report. */
|
||||
public class SolrE2ESummaryTest extends AbstractE2EFunctionalTest
|
||||
{
|
||||
/** The maximum time to wait for a report to update (in ms). */
|
||||
private static final int MAX_TIME = 60 * 1000;
|
||||
/** The frequency to check the report (in ms). */
|
||||
private static final int RETRY_INTERVAL = 100;
|
||||
|
||||
/** Check the FTS section of the admin summary contains the expected fields. */
|
||||
@Test
|
||||
public void testFTSReport() throws Exception
|
||||
{
|
||||
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
|
||||
|
||||
int toUpdate = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content needs to be updated'");
|
||||
assertTrue(toUpdate >= 0, "Expecting a non-negative count of documents whose content needs updating.");
|
||||
|
||||
int inSync = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content is in sync'");
|
||||
assertTrue(inSync >= 0, "Expecting a non-negative count of documents whose content is in sync.");
|
||||
}
|
||||
|
||||
/** Check that we can spot a document updating by using the SUMMARY report. */
|
||||
@Test
|
||||
public void testFTSReport_contentUpdate() throws Exception
|
||||
{
|
||||
RestResponse response2 = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
|
||||
int previousInSync = response2.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content is in sync'");
|
||||
|
||||
FileModel file = new FileModel("file.txt", "file.txt", "", FileType.TEXT_PLAIN, "file.txt");
|
||||
FileModel content = dataContent.usingUser(adminUserModel).usingSite(testSite).createContent(file);
|
||||
|
||||
// Wait for the number of "in-sync" documents to increase (i.e. when the document is indexed).
|
||||
Utility.sleep(RETRY_INTERVAL, MAX_TIME, () -> {
|
||||
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
|
||||
int inSync = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content is in sync'");
|
||||
assertTrue(inSync > previousInSync, "Expected a document to be indexed.");
|
||||
});
|
||||
|
||||
// Wait for the number of outdated documents to become zero.
|
||||
Utility.sleep(RETRY_INTERVAL, MAX_TIME, () ->
|
||||
{
|
||||
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
|
||||
int toUpdate = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content needs to be updated'");
|
||||
assertEquals(toUpdate, 0, "Expected number of outdated documents to drop to zero.");
|
||||
});
|
||||
|
||||
// Update the document's content with a large amount of text.
|
||||
StringBuilder largeText = new StringBuilder("Big update");
|
||||
IntStream.range(0, 100000).forEach((i) -> largeText.append(" ").append(UUID.randomUUID().toString()));
|
||||
dataContent.usingUser(adminUserModel).usingResource(content).updateContent(largeText.toString());
|
||||
|
||||
// Expect the number of outdated documents to increase beyond zero.
|
||||
Utility.sleep(RETRY_INTERVAL, MAX_TIME, () ->
|
||||
{
|
||||
RestResponse response = restClient.withParams("core=alfresco").withSolrAdminAPI().getAction("SUMMARY");
|
||||
int toUpdate = response.getResponse().body().jsonPath().get("Summary.alfresco.FTS.'Node count whose content needs to be updated'");
|
||||
assertNotEquals(toUpdate, 0, "Expected number of outdated documents to be greater than zero.");
|
||||
});
|
||||
}
|
||||
}
|
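The report read above comes from the Solr admin SUMMARY action. Below is a standalone sketch that queries the same action directly, outside the test framework; the host, port and shared secret are assumptions taken from the e2e test properties (solr.scheme=http, solr.server=localhost, solr.port=8083, solr.secret=secret) and may differ in a real deployment.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SummaryReportProbe
    {
        public static void main(String[] args) throws Exception
        {
            // Build the SUMMARY admin request against the alfresco core.
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8083/solr/admin/cores?action=SUMMARY&core=alfresco&wt=json"))
                    .header("X-Alfresco-Search-Secret", "secret") // only needed when secret-based communication is enabled
                    .GET()
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());

            // The counters asserted in the test above live under Summary.alfresco.FTS in this JSON.
            System.out.println(response.body());
        }
    }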
@@ -0,0 +1,122 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Search Services E2E Test
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
*
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
package org.alfresco.test.search.functional.searchServices.solr.admin;
|
||||
|
||||
import static org.testng.Assert.assertEquals;
|
||||
|
||||
import org.alfresco.rest.core.RestResponse;
|
||||
import org.alfresco.rest.search.RestRequestQueryModel;
|
||||
import org.alfresco.rest.search.SearchRequest;
|
||||
import org.alfresco.rest.search.SearchResponse;
|
||||
import org.alfresco.test.search.functional.AbstractE2EFunctionalTest;
|
||||
import org.alfresco.utility.data.CustomObjectTypeProperties;
|
||||
import org.alfresco.utility.model.FileModel;
|
||||
import org.alfresco.utility.model.FolderModel;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
/**
|
||||
* Tests validating the results of SOLR REST API Actions
|
||||
*/
|
||||
@Configuration
|
||||
public class SolrE2eActionTest extends AbstractE2EFunctionalTest
|
||||
{
|
||||
|
||||
// DBID (sys:node-dbid) value for the document
|
||||
Integer dbId;
|
||||
|
||||
/**
|
||||
* Create a new document and get the DBID value for it
|
||||
*/
|
||||
@BeforeClass(alwaysRun = true)
|
||||
public void dataPreparation() throws Exception
|
||||
{
|
||||
|
||||
// Create a new document
|
||||
FolderModel folder = new FolderModel("folder-aspect");
|
||||
dataContent.usingUser(testUser).usingSite(testSite).createCustomContent(folder, "cmis:folder",
|
||||
new CustomObjectTypeProperties());
|
||||
|
||||
FileModel file = new FileModel("file-aspect-" + System.currentTimeMillis() + ".txt");
|
||||
file.setContent("content file aspect");
|
||||
dataContent.usingUser(testUser).usingResource(folder).createCustomContent(file, "cmis:document",
|
||||
new CustomObjectTypeProperties());
|
||||
|
||||
waitForMetadataIndexing(file.getName(), true);
|
||||
|
||||
// Get the DBID for the created document
|
||||
String queryFile = "cm:name:" + file.getName();
|
||||
restClient.authenticateUser(dataContent.getAdminUser()).withParams(queryFile).withSolrAPI()
|
||||
.getSelectQueryJson();
|
||||
dbId = Integer.valueOf(
|
||||
restClient.onResponse().getResponse().body().jsonPath().get("response.docs[0].DBID").toString());
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* REINDEX a single node (identified by its DBID) on a specific core
|
||||
*/
|
||||
@Test
|
||||
public void testReindexNodeId()
|
||||
{
|
||||
|
||||
final String deleteQueryBody = "{\"delete\":{\"query\": \"DBID:" + dbId + "\"}}";
|
||||
|
||||
try
|
||||
{
|
||||
// Remove document from SOLR
|
||||
restClient.withSolrAPI().postAction("delete", deleteQueryBody);
|
||||
|
||||
// Re-index document using nodeId
|
||||
RestResponse response = restClient.withParams("core=alfresco", "nodeId=" + dbId).withSolrAdminAPI()
|
||||
.getAction("reindex");
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.alfresco.status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
waitForMetadataIndexing("DBID:" + dbId, true);
|
||||
|
||||
// Verify the node has been re-indexed to its original type "cm:content"
|
||||
String queryFile = "DBID:" + dbId;
|
||||
RestRequestQueryModel queryModel = new RestRequestQueryModel();
|
||||
queryModel.setQuery(queryFile);
|
||||
queryModel.setLanguage(SearchLanguage.AFTS.toString());
|
||||
SearchRequest searchRequest = new SearchRequest();
|
||||
searchRequest.setQuery(queryModel);
|
||||
SearchResponse searchResponse = restClient.authenticateUser(testUser).withSearchAPI().search(searchRequest);
|
||||
assertEquals(searchResponse.getEntries().get(0).getModel().getNodeType(), "cm:content");
|
||||
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
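For readers following the reindex flow above, a rough map of what those client calls hit (assumptions inferred from the parameters used in the test, not from this diff):

    // withSolrAPI().postAction("delete", ...)            -> Solr delete-by-query against the alfresco core
    // withSolrAdminAPI().getAction("reindex")
    //     with core=alfresco and nodeId=<DBID>           -> cores admin handler REINDEX action; the node is
    //                                                       re-indexed from the repository on the next tracker cycle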
@@ -27,7 +27,6 @@
|
||||
package org.alfresco.test.search.functional.searchServices.solr.admin;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@@ -38,6 +37,8 @@ import org.springframework.context.annotation.Configuration;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
|
||||
/**
|
||||
* End to end tests for SOLR Admin actions REST API, available from:
|
||||
*
|
||||
@@ -556,9 +557,11 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
RestResponse response = restClient.withParams("txid=" + txid).withSolrAdminAPI().getAction("purge");
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
|
||||
DEFAULT_CORE_NAMES.forEach(core -> {
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -566,7 +569,7 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
* @throws Exception
|
||||
*/
|
||||
@Test(priority = 25)
|
||||
public void testPurgeCore() throws Exception
|
||||
public void testPurgeCore()
|
||||
{
|
||||
final Integer txid = 1;
|
||||
|
||||
@@ -578,7 +581,7 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
}
|
||||
catch (Exception e)
|
||||
@@ -599,9 +602,11 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
RestResponse response = restClient.withSolrAdminAPI().getAction("purge");
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
|
||||
DEFAULT_CORE_NAMES.forEach(core -> {
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -661,9 +666,11 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
RestResponse response = restClient.withParams("txid=" + txid).withSolrAdminAPI().getAction("reindex");
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
|
||||
DEFAULT_CORE_NAMES.forEach(core -> {
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -671,7 +678,7 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
* @throws Exception
|
||||
*/
|
||||
@Test(priority = 30)
|
||||
public void testReindexCore() throws Exception
|
||||
public void testReindexCore()
|
||||
{
|
||||
Integer txid = 1;
|
||||
|
||||
@@ -683,7 +690,7 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
}
|
||||
catch (Exception e)
|
||||
@@ -705,12 +712,12 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
|
||||
DEFAULT_CORE_NAMES.forEach(core -> {
|
||||
List<String> errorNodeList = response.getResponse().body().jsonPath().get("action." + core);
|
||||
Assert.assertEquals(errorNodeList, Arrays.asList(), "Expected no error nodes,");
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
|
||||
List<String> errorNodeList = response.getResponse().body().jsonPath().get("action." + core + "['Error Nodes']");
|
||||
Assert.assertEquals(errorNodeList, emptyList(), "Expected no error nodes.");
|
||||
});
|
||||
}
|
||||
|
||||
@@ -719,7 +726,7 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
* @throws Exception
|
||||
*/
|
||||
@Test(priority = 32)
|
||||
public void testRetryCore() throws Exception
|
||||
public void testRetryCore()
|
||||
{
|
||||
DEFAULT_CORE_NAMES.forEach(core -> {
|
||||
|
||||
@@ -729,11 +736,11 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
|
||||
List<String> errorNodeList = response.getResponse().body().jsonPath().get("action." + core);
|
||||
Assert.assertEquals(errorNodeList, Arrays.asList(), "Expected no error nodes,");
|
||||
List<String> errorNodeList = response.getResponse().body().jsonPath().get("action." + core + "['Error Nodes']");
|
||||
Assert.assertEquals(errorNodeList, emptyList(), "Expected no error nodes.");
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
@@ -755,9 +762,10 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
RestResponse response = restClient.withParams("txid=" + txid).withSolrAdminAPI().getAction("index");
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
DEFAULT_CORE_NAMES.forEach(core -> {
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -777,7 +785,7 @@ public class SolrE2eAdminTest extends AbstractE2EFunctionalTest
|
||||
|
||||
checkResponseStatusOk(response);
|
||||
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action.status");
|
||||
String actionStatus = response.getResponse().body().jsonPath().get("action." + core + ".status");
|
||||
Assert.assertEquals(actionStatus, "scheduled");
|
||||
}
|
||||
catch (Exception e)
|
||||
|
e2e-test/src/test/resources/PipelineSuite.xml (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
|
||||
|
||||
<suite name="PipelineSuite" verbose="6" preserve-order="true">
|
||||
|
||||
<listeners>
|
||||
<listener class-name="org.alfresco.utility.report.log.LogsListener"/>
|
||||
<listener class-name="org.alfresco.utility.report.HtmlReportListener"/>
|
||||
</listeners>
|
||||
|
||||
<test name="Pipeline">
|
||||
<classes>
|
||||
<class name="org.alfresco.test.search.functional.searchServices.search.SearchSimpleCasesTest" />
|
||||
</classes>
|
||||
</test>
|
||||
|
||||
</suite>
|
e2e-test/src/test/resources/SearchFingerprintSuite.xml (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
|
||||
|
||||
<suite name="SearchSuite" verbose="6" preserve-order="true">
|
||||
|
||||
<listeners>
|
||||
<listener class-name="org.alfresco.utility.report.log.LogsListener"/>
|
||||
<listener class-name="org.alfresco.utility.report.HtmlReportListener"/>
|
||||
</listeners>
|
||||
|
||||
<test name="E2E-Fingerprint-SearchServices">
|
||||
<packages>
|
||||
<package name="org.alfresco.test.search.functional.searchServices.search.fingerprint" />
|
||||
</packages>
|
||||
</test>
|
||||
|
||||
</suite>
|
@@ -18,6 +18,8 @@
|
||||
<package name="org.alfresco.test.search.functional.searchServices.*">
|
||||
<exclude name="org.alfresco.test.search.functional.searchServices.cmis"/>
|
||||
<exclude name="org.alfresco.test.search.functional.searchServices.search.rm"/>
|
||||
<exclude name="org.alfresco.test.search.functional.searchServices.search.fingerprint"/>
|
||||
<exclude name="org.alfresco.test.search.functional.searchServices.search.crosslocale"/>
|
||||
</package>
|
||||
</packages>
|
||||
<!-- Although this class is included in the Search Services package, it needs to be excluded here so that it is executed last -->
|
||||
|
e2e-test/src/test/resources/SearchSuiteCrossLocale.xml (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
|
||||
|
||||
<suite name="SearchSuite" verbose="6" preserve-order="true">
|
||||
|
||||
<listeners>
|
||||
<listener class-name="org.alfresco.utility.report.log.LogsListener"/>
|
||||
<listener class-name="org.alfresco.utility.report.HtmlReportListener"/>
|
||||
</listeners>
|
||||
|
||||
<!-- These tests require Cross Locale configuration for Search Services -->
|
||||
<test name="E2E-CrossLocaleSearchServices">
|
||||
<packages>
|
||||
<package name="org.alfresco.test.search.functional.searchServices.search.crosslocale"/>
|
||||
</packages>
|
||||
</test>
|
||||
|
||||
</suite>
|
@@ -19,9 +19,11 @@ rest.rmPath=alfresco/api/-default-/public/gs/versions/1
|
||||
solr.scheme=http
|
||||
solr.server=localhost
|
||||
solr.port=8083
|
||||
solr.secret=secret
|
||||
|
||||
#Solr Indexing Time
|
||||
solrWaitTimeInSeconds=20
|
||||
# Use 1s and 60 attempts, see AbstractE2EFunctionalTest.SEARCH_MAX_ATTEMPTS
|
||||
solrWaitTimeInSeconds=1
|
||||
|
||||
# credentials
|
||||
admin.user=admin
|
||||
|
pom.xml (412 changed lines)
@@ -4,40 +4,303 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-super-pom</artifactId>
|
||||
<version>10</version>
|
||||
<version>12</version>
|
||||
</parent>
|
||||
<artifactId>alfresco-search-and-insight-parent</artifactId>
|
||||
<version>2.0.0-SNAPSHOT</version>
|
||||
<version>2.0.9</version>
|
||||
<packaging>pom</packaging>
|
||||
<name>Alfresco Search And Insight Parent</name>
|
||||
<name>Alfresco Search And Insight Engine</name>
|
||||
<distributionManagement>
|
||||
<repository>
|
||||
<id>alfresco-internal</id>
|
||||
<id>alfresco-enterprise-releases</id>
|
||||
<url>https://artifacts.alfresco.com/nexus/content/repositories/enterprise-releases/</url>
|
||||
</repository>
|
||||
<snapshotRepository>
|
||||
<id>alfresco-internal-snapshots</id>
|
||||
<id>alfresco-enterprise-snapshots</id>
|
||||
<url>https://artifacts.alfresco.com/nexus/content/repositories/enterprise-snapshots/</url>
|
||||
</snapshotRepository>
|
||||
</distributionManagement>
|
||||
<scm>
|
||||
<connection>scm:git:ssh://git@github.com/Alfresco/InsightEngine.git</connection>
|
||||
<developerConnection>scm:git:ssh://git@github.com/Alfresco/InsightEngine.git</developerConnection>
|
||||
<url>scm:git:ssh://git@github.com/Alfresco/InsightEngine.git</url>
|
||||
<tag>HEAD</tag>
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>atlassian</id>
|
||||
<url>https://packages.atlassian.com/maven-3rdparty/</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
<checksumPolicy>warn</checksumPolicy>
|
||||
</snapshots>
|
||||
<releases>
|
||||
<enabled>true</enabled>
|
||||
<checksumPolicy>warn</checksumPolicy>
|
||||
</releases>
|
||||
</repository>
|
||||
</repositories>
|
||||
<scm>
|
||||
<connection>scm:git:https://github.com/Alfresco/InsightEngine.git</connection>
|
||||
<developerConnection>scm:git:https://github.com/Alfresco/InsightEngine.git</developerConnection>
|
||||
<url>https://github.com/Alfresco/InsightEngine</url>
|
||||
<tag>2.0.9</tag>
|
||||
</scm>
|
||||
<properties>
|
||||
<java.version>11</java.version>
|
||||
<maven.build.sourceVersion>11</maven.build.sourceVersion>
|
||||
<solr.base.version>6.6.5</solr.base.version>
|
||||
<solr.version>${solr.base.version}-patched.2</solr.version>
|
||||
<solr.version>${solr.base.version}-patched.11</solr.version>
|
||||
<!-- The location to download the solr zip file from. -->
|
||||
<!-- <solr.zip>https://archive.apache.org/dist/lucene/solr/${solr.version}/solr-${solr.version}.zip</solr.zip> -->
|
||||
<!-- Solr startup scripts do not work with any Java version higher than 9 so the scripts have been patched -->
|
||||
<solr.zip>https://artifacts.alfresco.com/nexus/content/repositories/public/org/apache/solr/solr/solr-${solr.version}/solr-solr-${solr.version}.zip</solr.zip>
|
||||
<solr.directory>${project.build.directory}/solr-${solr.version}</solr.directory>
|
||||
<license-maven-plugin.version>2.0.1</license-maven-plugin.version>
|
||||
<licenseName>enterprise</licenseName>
|
||||
<license.update.dryrun>true</license.update.dryrun>
|
||||
<license.update.copyright>false</license.update.copyright>
|
||||
|
||||
<dependency.alfresco.xml-factory.version>1.3</dependency.alfresco.xml-factory.version>
|
||||
<dependency.alfresco-data-model.version>17.190</dependency.alfresco-data-model.version>
|
||||
|
||||
<dependency.jackson.version>2.15.2</dependency.jackson.version>
|
||||
|
||||
<dependency.google.guava.version>32.1.1-jre</dependency.google.guava.version>
|
||||
<dependency.apache-commons-compress.version>1.23.0</dependency.apache-commons-compress.version>
|
||||
<dependency.apache-commons-lang3.version>3.12.0</dependency.apache-commons-lang3.version>
|
||||
<dependency.apache-commons-lang.version>2.6</dependency.apache-commons-lang.version>
|
||||
<dependency.jakarta.xml.bind-api.version>3.0.1</dependency.jakarta.xml.bind-api.version>
|
||||
<dependency.restlet.version>2.3.12</dependency.restlet.version>
|
||||
<dependency.httpclient.version>4.5.14</dependency.httpclient.version>
|
||||
<dependency.codehaus.jackson.version>1.9.14-atlassian-6</dependency.codehaus.jackson.version>
|
||||
<dependency.carrotsearch.thirdpaty.simple-xml-safe.version>2.7.1</dependency.carrotsearch.thirdpaty.simple-xml-safe.version>
|
||||
|
||||
<dependency.xpp3.version>1.1.4c</dependency.xpp3.version>
|
||||
<dependency.jaxen.version>1.2.0</dependency.jaxen.version>
|
||||
<dependency.jaxb-xjc.version>4.0.3</dependency.jaxb-xjc.version>
|
||||
|
||||
<dependency.calcite.version>1.32.0</dependency.calcite.version>
|
||||
<dependency.slf4j.version>1.7.36</dependency.slf4j.version>
|
||||
<dependency.cxf.version>3.4.8</dependency.cxf.version>
|
||||
|
||||
<dependency.javax.servlet.api.version>3.1.0</dependency.javax.servlet.api.version>
|
||||
|
||||
<dependency.junit.version>4.13.2</dependency.junit.version>
|
||||
<dependency.mockito.version>5.4.0</dependency.mockito.version>
|
||||
<dependency.carrotsearch.randomizedtesting.version>2.8.1</dependency.carrotsearch.randomizedtesting.version>
|
||||
<dependency.chemistry.opencmis.version>1.1.0</dependency.chemistry.opencmis.version>
|
||||
<dependency.protobuf.version>3.23.4</dependency.protobuf.version>
|
||||
<dependency.jayway.jsonpath.version>2.8.0</dependency.jayway.jsonpath.version>
|
||||
<dependency.janino.version>3.1.10</dependency.janino.version>
|
||||
</properties>
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>commons-compiler</artifactId>
|
||||
<version>${dependency.janino.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>janino</artifactId>
|
||||
<version>${dependency.janino.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.jayway.jsonpath</groupId>
|
||||
<artifactId>json-path</artifactId>
|
||||
<version>${dependency.jayway.jsonpath.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>${dependency.protobuf.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${dependency.jackson.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${dependency.jackson.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${dependency.jackson.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-smile</artifactId>
|
||||
<version>${dependency.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.servlet</groupId>
|
||||
<artifactId>javax.servlet-api</artifactId>
|
||||
<version>${dependency.javax.servlet.api.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.carrotsearch.thirdparty</groupId>
|
||||
<artifactId>simple-xml-safe</artifactId>
|
||||
<version>${dependency.carrotsearch.thirdpaty.simple-xml-safe.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>calcite-core</artifactId>
|
||||
<version>${dependency.calcite.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-xjc</artifactId>
|
||||
<version>${dependency.jaxb-xjc.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>jaxen</groupId>
|
||||
<artifactId>jaxen</artifactId>
|
||||
<version>${dependency.jaxen.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xpp3</groupId>
|
||||
<artifactId>xpp3</artifactId>
|
||||
<version>${dependency.xpp3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>${dependency.google.guava.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-xmlfactory</artifactId>
|
||||
<version>${dependency.alfresco.xml-factory.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
<version>${dependency.apache-commons-lang.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-core</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-bindings-soap</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-bindings-xml</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-databinding-jaxb</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-frontend-jaxws</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-frontend-simple</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-transports-http</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-ws-addr</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-ws-policy</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-wsdl</artifactId>
|
||||
<version>${dependency.cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${dependency.apache-commons-lang3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<version>${dependency.slf4j.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-reload4j</artifactId>
|
||||
<version>${dependency.slf4j.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-compress</artifactId>
|
||||
<version>${dependency.apache-commons-compress.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>jakarta.xml.bind</groupId>
|
||||
<artifactId>jakarta.xml.bind-api</artifactId>
|
||||
<version>${dependency.jakarta.xml.bind-api.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.restlet.jee</groupId>
|
||||
<artifactId>org.restlet</artifactId>
|
||||
<version>${dependency.restlet.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.restlet.jee</groupId>
|
||||
<artifactId>org.restlet.ext.servlet</artifactId>
|
||||
<version>${dependency.restlet.version}</version>
|
||||
</dependency>
|
||||
<!-- spring framework is defined in "search-services" and "insight-engine" because "e2e-test" uses different versions -->
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
<version>${dependency.httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.codehaus.jackson</groupId>
|
||||
<artifactId>jackson-core-asl</artifactId>
|
||||
<version>${dependency.codehaus.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.codehaus.jackson</groupId>
|
||||
<artifactId>jackson-mapper-asl</artifactId>
|
||||
<version>${dependency.codehaus.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>${dependency.junit.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<version>${dependency.mockito.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.carrotsearch.randomizedtesting</groupId>
|
||||
<artifactId>randomizedtesting-runner</artifactId>
|
||||
<version>${dependency.carrotsearch.randomizedtesting.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.chemistry.opencmis</groupId>
|
||||
<artifactId>chemistry-opencmis-client-impl</artifactId>
|
||||
<version>${dependency.chemistry.opencmis.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
<modules>
|
||||
<module>search-services</module>
|
||||
<module>insight-engine</module>
|
||||
@@ -45,17 +308,6 @@
|
||||
</modules>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.8.1</version>
|
||||
<configuration>
|
||||
<release>${java.version}</release>
|
||||
<target>${java.version}</target>
|
||||
<showWarnings>true</showWarnings>
|
||||
<showDeprecation>true</showDeprecation>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
@@ -75,57 +327,83 @@
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>license-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<addJavaLicenseAfterPackage>false</addJavaLicenseAfterPackage>
|
||||
<organizationName>Alfresco Software Limited</organizationName>
|
||||
<canUpdateCopyright>true</canUpdateCopyright>
|
||||
<failOnMissingHeader>true</failOnMissingHeader>
|
||||
<failOnNotUptodateHeader>true</failOnNotUptodateHeader>
|
||||
<licenseResolver>classpath://alfresco</licenseResolver>
|
||||
<licenseName>${licenseName}</licenseName>
|
||||
<dryRun>${license.update.dryrun}</dryRun>
|
||||
<roots>
|
||||
<root>src</root>
|
||||
</roots>
|
||||
<includes>
|
||||
<include>**/*.java</include>
|
||||
<include>**/*.jsp</include>
|
||||
</includes>
|
||||
<!-- Classes derived from SOLR source code include the Apache License header -->
|
||||
<excludes>
|
||||
<exclude>**/org/alfresco/solr/component/AsyncBuildSuggestComponent.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/ConnectionImpl.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/DatabaseMetaDataImpl.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/StatementImpl.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrAggregate.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrEnumerator.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrFilter.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrMethod.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrProject.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrRel.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrRules.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrSort.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrTable.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrTableScan.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrToEnumerableConverter.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrToEnumerableConverterRule.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/FacetStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/JDBCStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/JSONTupleStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/LimitStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/StatsStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/StreamHandler.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/TimeSeriesStream.java</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
<version>${license-maven-plugin.version}</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>third-party-licenses</id>
|
||||
<goals>
|
||||
<goal>add-third-party</goal>
|
||||
</goals>
|
||||
<phase>generate-resources</phase>
|
||||
<!-- Override of dual licenses is not working due to https://github.com/mojohaus/license-maven-plugin/issues/386 -->
|
||||
<!-- Overrides are listed in README instead -->
|
||||
<configuration>
|
||||
<failOnMissing>true</failOnMissing>
|
||||
<excludedScopes>provided,test</excludedScopes>
|
||||
<excludedGroups>org.alfresco</excludedGroups>
|
||||
<licenseMerges>
|
||||
<licenseMerge>The Apache Software License, Version 2.0|Apache License, Version 2.0|Apache Public License 2.0|Apache 2.0|The Apache License, Version 2.0|Apache License 2.0|Apache Software License - Version 2.0|Apache 2|Apache License Version 2.0</licenseMerge>
|
||||
<licenseMerge>BSD 3 Clause|3-Clause BSD License|BSD 3-clause License</licenseMerge>
|
||||
<licenseMerge>Common Development and Distribution License|CDDL|CDDL+GPL License|CDDL + GPLv2 with classpath exception</licenseMerge>
|
||||
<licenseMerge>Eclipse Distribution License, Version 1.0|Eclipse Distribution License - v 1.0|EDL 1.0</licenseMerge>
|
||||
<licenseMerge>Eclipse Public License, Version 1.0|Eclipse Public License - Version 1.0|Eclipse Public License - v 1.0</licenseMerge>
|
||||
<licenseMerge>Eclipse Public License, Version 2.0|Eclipse Public License - Version 2.0|EPL 2.0</licenseMerge>
|
||||
<licenseMerge>Creative Commons License|CC0|Public Domain, per Creative Commons CC0</licenseMerge>
|
||||
<licenseMerge>The MIT License|MIT License</licenseMerge>
|
||||
</licenseMerges>
|
||||
<overrideUrl>https://raw.githubusercontent.com/Alfresco/third-party-license-overrides/master/override-THIRD-PARTY.properties</overrideUrl>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>check-licenses</id>
|
||||
<phase>compile</phase>
|
||||
<goals>
|
||||
<goal>update-file-header</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<addJavaLicenseAfterPackage>false</addJavaLicenseAfterPackage>
|
||||
<organizationName>Alfresco Software Limited</organizationName>
|
||||
<canUpdateCopyright>${license.update.copyright}</canUpdateCopyright>
|
||||
<failOnMissingHeader>true</failOnMissingHeader>
|
||||
<failOnNotUptodateHeader>true</failOnNotUptodateHeader>
|
||||
<licenseResolver>classpath://alfresco</licenseResolver>
|
||||
<licenseName>${licenseName}</licenseName>
|
||||
<dryRun>${license.update.dryrun}</dryRun>
|
||||
<roots>
|
||||
<root>src</root>
|
||||
</roots>
|
||||
<includes>
|
||||
<include>**/*.java</include>
|
||||
<include>**/*.jsp</include>
|
||||
</includes>
|
||||
<!-- Classes derived from SOLR source code include the Apache License header -->
|
||||
<excludes>
|
||||
<exclude>**/org/alfresco/solr/component/AsyncBuildSuggestComponent.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/ConnectionImpl.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/DatabaseMetaDataImpl.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java</exclude>
|
||||
<exclude>**/org/apache/solr/client/solrj/io/sql/StatementImpl.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrAggregate.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrEnumerator.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrFilter.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrMethod.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrProject.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrRel.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrRules.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrSort.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrTable.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrTableScan.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrToEnumerableConverter.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/sql/SolrToEnumerableConverterRule.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/FacetStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/JDBCStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/JSONTupleStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/LimitStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/StatsStream.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/StreamHandler.java</exclude>
|
||||
<exclude>**/org/alfresco/solr/stream/TimeSeriesStream.java</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
<dependencies>
|
||||
|
@@ -158,10 +158,10 @@ $ unzip alfresco-search-services-*.zip
$ cd alfresco-search-services
```

Change the default Alfresco communication protocol to `none`.
Change the default Alfresco communication protocol to `none`, and set `alfresco.allowUnauthenticatedSolrEndpoint` to `true`:

```bash
$ sed -i 's/alfresco.secureComms=https/alfresco.secureComms=none/' solrhome/templates/rerank/conf/solrcore.properties
$ sed -i 's/alfresco.secureComms=https/alfresco.secureComms=none\nalfresco.allowUnauthenticatedSolrEndpoint=true/' solrhome/templates/rerank/conf/solrcore.properties
```

*Note* The line above uses GNU sed; on Mac OS X you can use `gsed` or simply edit the file with a text editor.
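For example, on Mac OS X with Homebrew (an illustrative alternative; the `gnu-sed` formula provides the `gsed` command):

```bash
$ brew install gnu-sed
$ gsed -i 's/alfresco.secureComms=https/alfresco.secureComms=none\nalfresco.allowUnauthenticatedSolrEndpoint=true/' solrhome/templates/rerank/conf/solrcore.properties
```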
|
||||
@@ -293,8 +293,8 @@ The following environment variables are supported:
| SEARCH_LOG_LEVEL | ERROR, WARN, INFO, DEBUG or TRACE | The root logger level. |
| ENABLE_SPELLCHECK | true or false | Whether spellchecking is enabled or not. |
| DISABLE_CASCADE_TRACKING | true or false | Whether cascade tracking is enabled or not. Disabling cascade tracking will improve performance, but result in some feature loss (e.g. path queries). |
| ALFRESCO_SECURE_COMMS | https or none | Whether communication with the repository is secured. See below. |
| SOLR_SSL_... | --- | These variables are also used to configure SSL. See below. |
| ALFRESCO_SECURE_COMMS | secret or https | This property instructs Solr whether it should enable Shared Secret authentication or mTLS authentication with HTTPS. See below. |
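For illustration, several of these variables can be combined on a single `docker run`; the image tag `searchservices:develop` and the values below are only examples taken from the samples later in this document:

```bash
$ docker run -p 8983:8983 \
    -e SEARCH_LOG_LEVEL=DEBUG \
    -e DISABLE_CASCADE_TRACKING=true \
    -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive \
    -e ALFRESCO_SECURE_COMMS=secret \
    -e JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret" \
    searchservices:develop
```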
|
||||
|
||||
**Using Mutual Auth TLS (SSL)**
|
||||
|
||||
@@ -328,20 +328,39 @@ SOLR Web Console will be available at:
|
||||
|
||||
*Note* You must install the `browser.p12` certificate in your browser in order to access this URL.

**Using Plain HTTP**
**Using Shared Secret Authentication**

By default the Docker image uses SSL, so it's required to add an environment variable `ALFRESCO_SECURE_COMMS=none` to use SOLR in plain HTTP mode.
An alternative is to use a shared secret to secure the repo <-> solr communication. You just need to set both `ALFRESCO_SECURE_COMMS=secret` **and** `JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret"`.

By default, the SOLR Web Console will be available at:

[http://localhost:8983/solr](http://localhost:8983/solr)

but you can also start the Jetty server in SSL mode as explained above; in that case the SOLR Web Console will be available at:

[https://localhost:8983/solr](https://localhost:8983/solr)

*Note* You must install the `browser.p12` certificate in your browser in order to access this URL.

In both cases, when trying to access the SOLR Web Console you will have to provide the `X-Alfresco-Search-Secret` header in the request, specifying as its value the same value that was used for the `-Dalfresco.secureComms.secret` property.
You can do so natively on Safari through the `Dev Tools > Local Overrides` feature, or with a browser extension on Google Chrome/Firefox/Opera/Edge: [ModHeader](https://modheader.com/).
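For example, from the command line the header can be passed explicitly (a minimal sketch using the placeholder secret from above):

```bash
$ curl -H "X-Alfresco-Search-Secret: my_super_secret_secret" "http://localhost:8983/solr/"
```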
|
||||
|
||||
**Using Shared Secret Authentication**

By default the Docker image uses SSL, so to use SOLR with Shared Secret authentication it's required to add the environment variables `ALFRESCO_SECURE_COMMS=secret` **and** `JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret"`.

To run the docker image:

```bash
$ docker run -p 8983:8983 -e ALFRESCO_SECURE_COMMS=none -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive searchservices:develop
$ docker run -p 8983:8983 -e ALFRESCO_SECURE_COMMS=secret -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive -e JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret" searchservices:develop
```

SOLR Web Console will be available at:

[http://localhost:8983/solr](http://localhost:8983/solr)

You will have to provide the `X-Alfresco-Search-Secret` header in the request, specifying as its value the same value that was used for the `-Dalfresco.secureComms.secret` property.
|
||||
|
||||
**Enabling YourKit Java Profiler**
|
||||
|
||||
This Docker image includes the [YourKit Java Profiler](https://www.yourkit.com/java/profiler/) server service. To enable this service, so that the SOLR JVM can be inspected with the local YourKit application, additional configuration is required to set the YourKit `agentpath`. Mapping the exposed profiling port (10001 by default) is also required.
|
||||
@@ -369,13 +388,16 @@ solr6:
|
||||
SOLR_SOLR_HOST: "solr6"
|
||||
SOLR_SOLR_PORT: "8983"
|
||||
# HTTP settings
|
||||
ALFRESCO_SECURE_COMMS: "none"
|
||||
ALFRESCO_SECURE_COMMS: "secret"
|
||||
#Create the default alfresco and archive cores
|
||||
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
|
||||
SOLR_JAVA_MEM: "-Xms2g -Xmx2g"
|
||||
SOLR_OPTS: "
|
||||
-agentpath:/usr/local/YourKit-JavaProfiler-2019.8/bin/linux-x86-64/libyjpagent.so=port=10001,listen=all
|
||||
"
|
||||
JAVA_TOOL_OPTIONS: "
|
||||
-Dalfresco.secureComms.secret=my_super_secret_secret
|
||||
"
|
||||
ports:
|
||||
- 8083:8983 #Browser port
|
||||
- 10001:10001 #YourKit port
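# Illustrative note: with the agentpath option above and this port mapping in place,
# a local YourKit UI can attach to the profiled SOLR JVM on localhost:10001.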
|
||||
@@ -400,7 +422,7 @@ During deployment time whenever Search Services or Insight Engine image starts,
|
||||
To run the docker image:
|
||||
|
||||
```bash
|
||||
$ docker run -p 8984:8983 -e REPLICATION_TYPE=slave -e ALFRESCO_SECURE_COMMS=none -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive searchservices:develop
|
||||
$ docker run -p 8984:8983 -e REPLICATION_TYPE=slave -e ALFRESCO_SECURE_COMMS=secret -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive -e JAVA_TOOL_OPTIONS="-Dalfresco.secureComms.secret=my_super_secret_secret" searchservices:develop
|
||||
```
|
||||
Solr-slave endpoint: [http://localhost:8984/solr](http://localhost:8984/solr)
|
||||
|
||||
@@ -408,7 +430,7 @@ To generate your own Docker-compose file please follow [generator-alfresco-docke
|
||||
|
||||
### Use Alfresco Search Services Docker Image with Docker Compose
|
||||
|
||||
Sample configuration in a Docker Compose file using **Plain HTTP** protocol to communicate with Alfresco Repository.
|
||||
Sample configuration in a Docker Compose file using **Shared Secret Authentication** to communicate with Alfresco Repository.
|
||||
|
||||
```
|
||||
solr6:
|
||||
@@ -422,10 +444,13 @@ solr6:
|
||||
SOLR_SOLR_HOST: "solr6"
|
||||
SOLR_SOLR_PORT: "8983"
|
||||
# HTTP settings
|
||||
ALFRESCO_SECURE_COMMS: "none"
|
||||
ALFRESCO_SECURE_COMMS: "secret"
|
||||
#Create the default alfresco and archive cores
|
||||
SOLR_CREATE_ALFRESCO_DEFAULTS: "alfresco,archive"
|
||||
SOLR_JAVA_MEM: "-Xms2g -Xmx2g"
|
||||
JAVA_TOOL_OPTIONS: "
|
||||
-Dalfresco.secureComms.secret=my_super_secret_secret
|
||||
"
|
||||
ports:
|
||||
- 8083:8983 #Browser port
|
||||
```
|
||||
@@ -434,6 +459,8 @@ SOLR Web Console will be available at:
|
||||
|
||||
[http://localhost:8983/solr](http://localhost:8983/solr)
|
||||
|
||||
You will have to provide the `X-Alfresco-Search-Secret` header in the request, specifying as its value the same value that was used for the `-Dalfresco.secureComms.secret` property.
|
||||
|
||||
|
||||
Sample configuration in a Docker Compose file using **Mutual Auth TLS (SSL)** protocol to communicate with Alfresco Repository.
|
||||
|
||||
|
@@ -66,10 +66,9 @@ def generate_fields(field_type, tokenized, string, cross_locale, sortable, sugge
|
||||
|
||||
if string:
|
||||
generated_fields.append(get_copy_field_xml(field, create_non_tokenized(prefix)))
|
||||
generated_fields.append(get_copy_field_xml(field, create_non_tokenized_cross_locale(prefix)))
|
||||
if sortable:
|
||||
generated_fields.append(get_copy_field_xml(field, create_sortable(prefix)))
|
||||
if cross_locale:
|
||||
generated_fields.append(get_copy_field_xml(field, create_non_tokenized_cross_locale(prefix)))
|
||||
|
||||
if suggestable:
|
||||
generated_fields.append(get_copy_field_xml(field, "suggest"))
|
||||
@@ -107,7 +106,6 @@ def generate_text(file):
|
||||
file.write("\n")
|
||||
file.write("\n")
|
||||
|
||||
|
||||
def main():
|
||||
file = open(output_file, "w")
|
||||
file.write('<fields>\n')
|
||||
|
@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-search-parent</artifactId>
|
||||
<version>2.0.0-SNAPSHOT</version>
|
||||
<version>2.0.9</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -15,12 +15,16 @@
|
||||
<dependency>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-solrclient-lib</artifactId>
|
||||
<version>2.0.0-SNAPSHOT</version>
|
||||
<version>2.0.9</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>servlet-api</artifactId>
|
||||
<groupId>javax.servlet</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
@@ -38,151 +42,337 @@
|
||||
<version>${solr.version}</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-smile</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite.avatica</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>jdk.tools</groupId>
|
||||
<artifactId>jdk.tools</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-analysis-extras</artifactId>
|
||||
<version>${solr.version}</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-smile</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite.avatica</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite.avatica</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-langid</artifactId>
|
||||
<version>${solr.version}</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-smile</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.adobe.xmp</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite.avatica</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>xercesImpl</artifactId>
|
||||
<groupId>xerces</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.xmlbeans</groupId>
|
||||
<artifactId>xmlbeans</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcmail-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.tika</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.poi</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.pdfbox</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-clustering</artifactId>
|
||||
<version>${solr.version}</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-smile</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite.avatica</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.simpleframework</groupId>
|
||||
<artifactId>simple-xml</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- replace simple-xml from solr-clustering with simple-xml-safe -->
|
||||
<dependency>
|
||||
<groupId>com.carrotsearch.thirdparty</groupId>
|
||||
<artifactId>simple-xml-safe</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<version>${slf4j.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
<version>${slf4j.version}</version>
|
||||
<artifactId>slf4j-reload4j</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>jaxen</groupId>
|
||||
<artifactId>jaxen</artifactId>
|
||||
<version>1.2.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-xjc</artifactId>
|
||||
<version>2.3.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>3.10</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-core</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-bindings-soap</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-bindings-xml</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-databinding-jaxb</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-frontend-jaxws</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-frontend-simple</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-transports-http</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-ws-addr</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-ws-policy</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-wsdl</artifactId>
|
||||
<version>${cxf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xpp3</groupId>
|
||||
<artifactId>xpp3</artifactId>
|
||||
<version>1.1.4c</version>
|
||||
</dependency>
|
||||
|
||||
<!-- DATE functions (YEAR, MONTH, ...) are broken in Calcite 1.11.0 (the default
version provided by SOLR 6.6.x). Manually upgrading the Calcite version to 1.12.0
to support these functions -->
|
||||
<dependency>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>calcite-core</artifactId>
|
||||
<version>1.12.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>calcite-linq4j</artifactId>
|
||||
<version>1.12.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.alfresco</groupId>
|
||||
<artifactId>alfresco-xmlfactory</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>calcite-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>commons-compiler</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>janino</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.jayway.jsonpath</groupId>
|
||||
<artifactId>json-path</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.13</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<version>3.4.0</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
@@ -191,58 +381,57 @@
|
||||
<artifactId>solr-test-framework</artifactId>
|
||||
<version>${solr.version}</version>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-smile</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.calcite.avatica</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.carrotsearch.randomizedtesting</groupId>
|
||||
<artifactId>randomizedtesting-runner</artifactId>
|
||||
<version>2.7.8</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.chemistry.opencmis</groupId>
|
||||
<artifactId>chemistry-opencmis-client-impl</artifactId>
|
||||
<version>1.1.0</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>alfresco-public</id>
|
||||
<url>https://artifacts.alfresco.com/nexus/content/groups/public</url>
|
||||
<releases>
|
||||
<enabled>true</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
<repository>
|
||||
<id>alfresco-public-snapshots</id>
|
||||
<url>https://artifacts.alfresco.com/nexus/content/groups/public-snapshots</url>
|
||||
<releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
<repository>
|
||||
<id>central</id>
|
||||
<name>Central Repository</name>
|
||||
<url>https://repo.maven.apache.org/maven2</url>
|
||||
<layout>default</layout>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
<repository>
|
||||
<id>maven-restlet</id>
|
||||
<name>Public online Restlet repository</name>
|
||||
<url>http://maven.restlet.talend.com</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
<build>
|
||||
<finalName>alfresco-solr</finalName>
|
||||
<plugins>
|
||||
@@ -260,6 +449,16 @@
|
||||
</excludes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>license-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>third-party-licenses</id>
|
||||
<phase>none</phase>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-resources-plugin</artifactId>
|
||||
<executions>
|
||||
@@ -274,6 +473,24 @@
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>src/main/resources/solr/instance</directory>
|
||||
<excludes>
|
||||
<exclude>conf/shared.properties</exclude>
|
||||
</excludes>
|
||||
</resource>
|
||||
</resources>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>copy-test-conf</id>
|
||||
<phase>generate-test-resources</phase>
|
||||
<goals>
|
||||
<goal>copy-resources</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>${project.build.testOutputDirectory}/test-files/conf</outputDirectory>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>src/test/resources/test-files/conf</directory>
|
||||
</resource>
|
||||
</resources>
|
||||
</configuration>
|
||||
@@ -410,6 +627,7 @@
|
||||
<plugin>
|
||||
<groupId>org.jacoco</groupId>
|
||||
<artifactId>jacoco-maven-plugin</artifactId>
|
||||
<version>0.8.10</version>
|
||||
<configuration>
|
||||
<excludes>
|
||||
<exclude>**/AnnotationWriter.*</exclude>
|
||||
|
@@ -162,29 +162,39 @@ public class AlfrescoCollatableTextFieldType extends StrField
|
||||
return values[slot];
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the term string value for supplied doc
|
||||
*
|
||||
* @param doc
|
||||
* the document id that was hit
|
||||
* @param term
|
||||
* a {@link BytesRef} object representing a UTF-8 encoded term in the index
|
||||
*
|
||||
* @return the term value in string format
|
||||
*/
|
||||
private String findBestValue(int doc, BytesRef term)
|
||||
{
|
||||
if (term.length == 0 && docsWithField != null && docsWithField.get(doc) == false)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
// Converts the stored bytes (as UTF8) to string
|
||||
String withLocale = term.utf8ToString();
|
||||
|
||||
// split strin into MLText object
|
||||
if (withLocale == null)
|
||||
{
|
||||
return withLocale;
|
||||
}
|
||||
else if (withLocale.startsWith("\u0000"))
|
||||
|
||||
if (withLocale != null && withLocale.startsWith("\u0000"))
|
||||
{
|
||||
// the array can either be [, locale, term value] or just [, locale] depending on whether the term value used
|
||||
// to perform the sort is empty or not
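// e.g. a term stored as <NUL>en_GB<NUL>banana splits into ["", "en_GB", "banana"], so parts[2] is the sort value (illustrative example)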
|
||||
String[] parts = withLocale.split("\u0000");
|
||||
return parts[2];
|
||||
}
|
||||
else
|
||||
{
|
||||
return withLocale;
|
||||
|
||||
if (parts != null && parts.length == 3)
|
||||
{
|
||||
return parts[2];
|
||||
}
|
||||
}
|
||||
|
||||
return withLocale;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
|
@@ -32,20 +32,20 @@ import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.solr.adapters.IOpenBitSet;
|
||||
import org.alfresco.solr.client.SOLRAPIClientFactory;
|
||||
import org.alfresco.solr.config.ConfigUtil;
|
||||
import org.alfresco.solr.tracker.AbstractTracker;
|
||||
import org.alfresco.solr.tracker.AclTracker;
|
||||
import org.alfresco.solr.tracker.AbstractShardInformationPublisher;
|
||||
import org.alfresco.solr.tracker.ActivatableTracker;
|
||||
import org.alfresco.solr.tracker.ShardStatePublisher;
|
||||
import org.alfresco.solr.tracker.DBIDRangeRouter;
|
||||
import org.alfresco.solr.tracker.DocRouter;
|
||||
import org.alfresco.solr.tracker.IndexHealthReport;
|
||||
import org.alfresco.solr.tracker.MetadataTracker;
|
||||
import org.alfresco.solr.tracker.NodeStatePublisher;
|
||||
import org.alfresco.solr.tracker.SolrTrackerScheduler;
|
||||
import org.alfresco.solr.tracker.Tracker;
|
||||
import org.alfresco.solr.tracker.TrackerRegistry;
|
||||
import org.alfresco.solr.utils.Utils;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.alfresco.util.shard.ExplicitShardingPolicy;
|
||||
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.params.CoreAdminParams;
|
||||
@@ -81,6 +81,7 @@ import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.LongToIntFunction;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@@ -88,8 +89,8 @@ import static java.util.Arrays.asList;
|
||||
import static java.util.Arrays.stream;
|
||||
import static java.util.Optional.of;
|
||||
import static java.util.Optional.ofNullable;
|
||||
import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_INACLTXID;
|
||||
import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_INTXID;
|
||||
import static org.alfresco.repo.search.adaptor.QueryConstants.FIELD_INACLTXID;
|
||||
import static org.alfresco.repo.search.adaptor.QueryConstants.FIELD_INTXID;
|
||||
import static org.alfresco.solr.HandlerOfResources.extractCustomProperties;
|
||||
import static org.alfresco.solr.HandlerOfResources.getSafeBoolean;
|
||||
import static org.alfresco.solr.HandlerOfResources.getSafeLong;
|
||||
@@ -103,6 +104,8 @@ import static org.alfresco.solr.HandlerReportHelper.buildAclTxReport;
|
||||
import static org.alfresco.solr.HandlerReportHelper.buildNodeReport;
|
||||
import static org.alfresco.solr.HandlerReportHelper.buildTrackerReport;
|
||||
import static org.alfresco.solr.HandlerReportHelper.buildTxReport;
|
||||
import static org.alfresco.solr.InterceptorRegistry.registerSolrClientInterceptors;
|
||||
import static org.alfresco.solr.utils.Utils.isNotNullAndNotEmpty;
|
||||
import static org.alfresco.solr.utils.Utils.isNullOrEmpty;
|
||||
import static org.alfresco.solr.utils.Utils.notNullOrEmpty;
|
||||
|
||||
@@ -138,10 +141,10 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
|
||||
private static final String REPORT = "report";
|
||||
private static final String SUMMARY = "Summary";
|
||||
private static final String ARG_ACLTXID = "acltxid";
|
||||
static final String ARG_ACLTXID = "acltxid";
|
||||
static final String ARG_TXID = "txid";
|
||||
private static final String ARG_ACLID = "aclid";
|
||||
private static final String ARG_NODEID = "nodeid";
|
||||
static final String ARG_ACLID = "aclid";
|
||||
static final String ARG_NODEID = "nodeid";
|
||||
private static final String ARG_QUERY = "query";
|
||||
private static final String DATA_DIR_ROOT = "data.dir.root";
|
||||
public static final String ALFRESCO_DEFAULTS = "create.alfresco.defaults";
|
||||
@@ -167,7 +170,8 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
private static final String ACTION_STATUS_ERROR = "error";
|
||||
static final String ACTION_STATUS_SCHEDULED = "scheduled";
|
||||
static final String ACTION_STATUS_NOT_SCHEDULED = "notScheduled";
|
||||
|
||||
static final String ADDITIONAL_INFO = "additionalInfo";
|
||||
static final String WARNING = "WARNING";
|
||||
static final String DRY_RUN_PARAMETER_NAME = "dryRun";
|
||||
static final String FROM_TX_COMMIT_TIME_PARAMETER_NAME = "fromTxCommitTime";
|
||||
static final String TO_TX_COMMIT_TIME_PARAMETER_NAME = "toTxCommitTime";
|
||||
@@ -197,7 +201,7 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
TrackerRegistry trackerRegistry;
|
||||
ConcurrentHashMap<String, InformationServer> informationServers;
|
||||
|
||||
private final static List<String> CORE_PARAMETER_NAMES = asList(CoreAdminParams.CORE, "coreName", "index");
|
||||
final static List<String> CORE_PARAMETER_NAMES = asList(CoreAdminParams.CORE, "coreName", "index");
|
||||
|
||||
public AlfrescoCoreAdminHandler()
|
||||
{
|
||||
@@ -217,6 +221,9 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
String createDefaultCores = ConfigUtil.locateProperty(ALFRESCO_DEFAULTS, "");
|
||||
int numShards = Integer.parseInt(ConfigUtil.locateProperty(NUM_SHARDS, "1"));
|
||||
String shardIds = ConfigUtil.locateProperty(SHARD_IDS, null);
|
||||
registerSolrClientInterceptors();
|
||||
|
||||
|
||||
if (createDefaultCores != null && !createDefaultCores.isEmpty())
|
||||
{
|
||||
Thread thread = new Thread(() ->
|
||||
@@ -228,6 +235,7 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates new default cores based on the "createDefaultCores" String passed in.
|
||||
*
|
||||
@@ -311,7 +319,6 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
|
||||
AlfrescoSolrDataModel.getInstance().close();
|
||||
SOLRAPIClientFactory.close();
|
||||
MultiThreadedHttpConnectionManager.shutdownAll();
|
||||
|
||||
coreNames().forEach(trackerRegistry::removeTrackersForCore);
|
||||
informationServers.clear();
|
||||
@@ -495,6 +502,14 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
ofNullable(params.get("resource"))
|
||||
.orElse("log4j.properties")));
|
||||
break;
|
||||
case "ENABLE-INDEXING":
|
||||
case "ENABLEINDEXING":
|
||||
rsp.add(ACTION_LABEL, actionEnableIndexing(params));
|
||||
break;
|
||||
case "DISABLE-INDEXING":
|
||||
case "DISABLEINDEXING":
|
||||
rsp.add(ACTION_LABEL, actionDisableIndexing(params));
|
||||
break;
|
||||
default:
|
||||
super.handleCustomAction(req, rsp);
|
||||
break;
|
||||
@@ -1010,12 +1025,12 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
coreNames().stream()
|
||||
.filter(coreName -> requestedCoreName == null || coreName.equals(requestedCoreName))
|
||||
.filter(trackerRegistry::hasTrackersForCore)
|
||||
.map(coreName -> new Pair<>(coreName, coreStatePublisher(coreName)))
|
||||
.filter(coreNameAndPublisher -> coreNameAndPublisher.getSecond() != null)
|
||||
.forEach(coreNameAndPublisher ->
|
||||
.map(coreName -> new Pair<>(coreName, nodeStatusChecker(coreName)))
|
||||
.filter(coreNameAndNodeChecker -> coreNameAndNodeChecker.getSecond() != null)
|
||||
.forEach(coreNameAndNodeChecker ->
|
||||
report.add(
|
||||
coreNameAndPublisher.getFirst(),
|
||||
buildNodeReport(coreNameAndPublisher.getSecond(), nodeid)));
|
||||
coreNameAndNodeChecker.getFirst(),
|
||||
buildNodeReport(coreNameAndNodeChecker.getSecond(), nodeid)));
|
||||
return report;
|
||||
}
|
||||
|
||||
@@ -1388,8 +1403,6 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
* - toCalTx, optional: to ACL transaction Id to filter report results
|
||||
*
|
||||
* - report.core: multiple Objects with the details of the report ("core" is the name of the Core)
|
||||
*
|
||||
* @throws JSONException
|
||||
*/
|
||||
private NamedList<Object> actionREPORT(SolrParams params) throws JSONException
|
||||
{
|
||||
@@ -1444,16 +1457,30 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
* @return Response including the action result:
|
||||
* - status: scheduled, as it will be executed by Trackers on the next maintenance operation
|
||||
*/
|
||||
private NamedList<Object> actionPURGE(SolrParams params)
|
||||
NamedList<Object> actionPURGE(SolrParams params)
|
||||
{
|
||||
final NamedList<Object> response = new SimpleOrderedMap<>();
|
||||
Consumer<String> purgeOnSpecificCore = coreName -> {
|
||||
final MetadataTracker metadataTracker = trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class);
|
||||
final AclTracker aclTracker = trackerRegistry.getTrackerForCore(coreName, AclTracker.class);
|
||||
|
||||
apply(params, ARG_TXID, metadataTracker::addTransactionToPurge);
|
||||
apply(params, ARG_ACLTXID, aclTracker::addAclChangeSetToPurge);
|
||||
apply(params, ARG_NODEID, metadataTracker::addNodeToPurge);
|
||||
apply(params, ARG_ACLID, aclTracker::addAclToPurge);
|
||||
final NamedList<Object> coreResponse = new SimpleOrderedMap<>();
|
||||
|
||||
if (metadataTracker.isEnabled() & aclTracker.isEnabled())
|
||||
{
|
||||
apply(params, ARG_TXID, metadataTracker::addTransactionToPurge);
|
||||
apply(params, ARG_ACLTXID, aclTracker::addAclChangeSetToPurge);
|
||||
apply(params, ARG_NODEID, metadataTracker::addNodeToPurge);
|
||||
apply(params, ARG_ACLID, aclTracker::addAclToPurge);
|
||||
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
|
||||
}
|
||||
else
|
||||
{
|
||||
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_NOT_SCHEDULED);
|
||||
coreResponse.add(ADDITIONAL_INFO, "Trackers have been disabled: the purge request cannot be executed; please enable indexing and then resubmit this command.");
|
||||
}
|
||||
|
||||
response.add(coreName, coreResponse);
|
||||
};
|
||||
|
||||
String requestedCoreName = coreName(params);
|
||||
@@ -1463,8 +1490,11 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
.filter(this::isMasterOrStandalone)
|
||||
.forEach(purgeOnSpecificCore);
|
||||
|
||||
NamedList<Object> response = new SimpleOrderedMap<>();
|
||||
response.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
|
||||
if (response.size() == 0)
|
||||
{
|
||||
addAlertMessage(response);
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
@@ -1484,18 +1514,31 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
* @return Response including the action result:
|
||||
* - action.status: scheduled, as it will be executed by Trackers on the next maintenance operation
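* Example request (illustrative): /solr/admin/cores?action=REINDEX&nodeid=12345&coreName=alfresco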
|
||||
*/
|
||||
private NamedList<Object> actionREINDEX(SolrParams params)
|
||||
NamedList<Object> actionREINDEX(SolrParams params)
|
||||
{
|
||||
final NamedList<Object> response = new SimpleOrderedMap<>();
|
||||
Consumer<String> reindexOnSpecificCore = coreName -> {
|
||||
final MetadataTracker metadataTracker = trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class);
|
||||
final AclTracker aclTracker = trackerRegistry.getTrackerForCore(coreName, AclTracker.class);
|
||||
final NamedList<Object> coreResponse = new SimpleOrderedMap<>();
|
||||
|
||||
apply(params, ARG_TXID, metadataTracker::addTransactionToReindex);
|
||||
apply(params, ARG_ACLTXID, aclTracker::addAclChangeSetToReindex);
|
||||
apply(params, ARG_NODEID, metadataTracker::addNodeToReindex);
|
||||
apply(params, ARG_ACLID, aclTracker::addAclToReindex);
|
||||
if (metadataTracker.isEnabled() & aclTracker.isEnabled())
|
||||
{
|
||||
apply(params, ARG_TXID, metadataTracker::addTransactionToReindex);
|
||||
apply(params, ARG_ACLTXID, aclTracker::addAclChangeSetToReindex);
|
||||
apply(params, ARG_NODEID, metadataTracker::addNodeToReindex);
|
||||
apply(params, ARG_ACLID, aclTracker::addAclToReindex);
|
||||
|
||||
ofNullable(params.get(ARG_QUERY)).ifPresent(metadataTracker::addQueryToReindex);
|
||||
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
|
||||
ofNullable(params.get(ARG_QUERY)).ifPresent(metadataTracker::addQueryToReindex);
|
||||
}
|
||||
else
|
||||
{
|
||||
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_NOT_SCHEDULED);
|
||||
coreResponse.add(ADDITIONAL_INFO, "Trackers have been disabled: the REINDEX request cannot be executed; please enable indexing and then resubmit this command.");
|
||||
}
|
||||
|
||||
response.add(coreName, coreResponse);
|
||||
};
|
||||
|
||||
String requestedCoreName = coreName(params);
|
||||
@@ -1505,8 +1548,11 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
|
||||
.filter(this::isMasterOrStandalone)
|
||||
.forEach(reindexOnSpecificCore);
|
||||
|
||||
NamedList<Object> response = new SimpleOrderedMap<>();
|
||||
response.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
|
||||
if (response.size() == 0)
|
||||
{
|
||||
addAlertMessage(response);
|
||||
}
|
||||
|
||||
return response;
|
||||
}
@@ -1520,35 +1566,41 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
* - action.status: scheduled, as it will be executed by Trackers on the next maintenance operation
* - core: list of Document Ids with error that are going to reindexed
*/
private NamedList<Object> actionRETRY(SolrParams params)
NamedList<Object> actionRETRY(SolrParams params)
{
NamedList<Object> response = new SimpleOrderedMap<>();

final Consumer<String> retryOnSpecificCore = coreName -> {
MetadataTracker tracker = trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class);
InformationServer srv = informationServers.get(coreName);
final NamedList<Object> coreResponse = new SimpleOrderedMap<>();

try
if (tracker.isEnabled())
{
for (Long nodeid : srv.getErrorDocIds())
try
{
tracker.addNodeToReindex(nodeid);
for (Long nodeid : srv.getErrorDocIds())
{
tracker.addNodeToReindex(nodeid);
}
coreResponse.add("Error Nodes", srv.getErrorDocIds());
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
} catch (Exception exception)
{
LOGGER.error("I/O Exception while adding Node to reindex.", exception);
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_ERROR);
coreResponse.add(ACTION_ERROR_MESSAGE_LABEL, exception.getMessage());
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_NOT_SCHEDULED);
}
response.add(coreName, srv.getErrorDocIds());
}
catch (Exception exception)
else
{
LOGGER.error("I/O Exception while adding Node to reindex.", exception);
response.add(ACTION_STATUS_LABEL, ACTION_STATUS_ERROR);
response.add(ACTION_ERROR_MESSAGE_LABEL, exception.getMessage());

coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_NOT_SCHEDULED);
coreResponse.add(ADDITIONAL_INFO, "Trackers have been disabled: the RETRY request cannot be executed; please enable indexing and then resubmit this command.");
}
};

if (Objects.equals(response.get(ACTION_STATUS_LABEL), ACTION_STATUS_ERROR))
{
return response;
}
response.add(coreName, coreResponse);
};

String requestedCoreName = coreName(params);

@@ -1557,7 +1609,11 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
.filter(this::isMasterOrStandalone)
.forEach(retryOnSpecificCore);

response.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
if (response.size() == 0)
{
addAlertMessage(response);
}

return response;
}
@@ -1576,16 +1632,29 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
* @return Response including the action result:
* - action.status: scheduled, as it will be executed by Trackers on the next maintenance operation
*/
private NamedList<Object> actionINDEX(SolrParams params)
NamedList<Object> actionINDEX(SolrParams params)
{
final NamedList<Object> response = new SimpleOrderedMap<>();
Consumer<String> indexOnSpecificCore = coreName -> {
final MetadataTracker metadataTracker = trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class);
final AclTracker aclTracker = trackerRegistry.getTrackerForCore(coreName, AclTracker.class);
final NamedList<Object> coreResponse = new SimpleOrderedMap<>();

apply(params, ARG_TXID, metadataTracker::addTransactionToIndex);
apply(params, ARG_ACLTXID, aclTracker::addAclChangeSetToIndex);
apply(params, ARG_NODEID, metadataTracker::addNodeToIndex);
apply(params, ARG_ACLID, aclTracker::addAclToIndex);
if (metadataTracker.isEnabled() & aclTracker.isEnabled())
{
apply(params, ARG_TXID, metadataTracker::addTransactionToIndex);
apply(params, ARG_ACLTXID, aclTracker::addAclChangeSetToIndex);
apply(params, ARG_NODEID, metadataTracker::addNodeToIndex);
apply(params, ARG_ACLID, aclTracker::addAclToIndex);
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
}
else
{
coreResponse.add(ACTION_STATUS_LABEL, ACTION_STATUS_NOT_SCHEDULED);
coreResponse.add(ADDITIONAL_INFO, "Trackers have been disabled: the INDEX request cannot be executed; please enable indexing and then resubmit this command.");
}

response.add(coreName, coreResponse);
};

String requestedCoreName = coreName(params);
@@ -1595,11 +1664,24 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
.filter(this::isMasterOrStandalone)
.forEach(indexOnSpecificCore);

NamedList<Object> response = new SimpleOrderedMap<>();
response.add(ACTION_STATUS_LABEL, ACTION_STATUS_SCHEDULED);
if (response.size() == 0)
{
addAlertMessage(response);
}

return response;
}
NamedList<Object> actionDisableIndexing(SolrParams params) throws JSONException
{
return executeTrackerSubsystemLifecycleAction(params, this::disableIndexingOnSpecificCore);
}

NamedList<Object> actionEnableIndexing(SolrParams params) throws JSONException
{
return executeTrackerSubsystemLifecycleAction(params, this::enableIndexingOnSpecificCore);
}

/**
* Find transactions and acls missing or duplicated in the cores and
* add them to be reindexed on the next maintenance operation
@@ -1644,24 +1726,30 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
boolean dryRun = params.getBool(DRY_RUN_PARAMETER_NAME, true);
int maxTransactionsToSchedule = getMaxTransactionToSchedule(params);

MetadataTracker metadataTracker = trackerRegistry.getTrackerForCore(requestedCoreName, MetadataTracker.class);
AclTracker aclTracker = trackerRegistry.getTrackerForCore(requestedCoreName, AclTracker.class);
final boolean actualDryRun = dryRun | (metadataTracker == null || metadataTracker.isDisabled()) || (aclTracker == null || aclTracker.isDisabled());

LOGGER.debug("FIX Admin request on core {}, parameters: " +
FROM_TX_COMMIT_TIME_PARAMETER_NAME + " = {}, " +
TO_TX_COMMIT_TIME_PARAMETER_NAME + " = {}, " +
DRY_RUN_PARAMETER_NAME + " = {}, " +
MAX_TRANSACTIONS_TO_SCHEDULE_PARAMETER_NAME + " = {}",
"actualDryRun = {} " +
MAX_TRANSACTIONS_TO_SCHEDULE_PARAMETER_NAME + " = {}",
requestedCoreName,
ofNullable(fromTxCommitTime).map(Object::toString).orElse("N.A."),
ofNullable(toTxCommitTime).map(Object::toString).orElse("N.A."),
dryRun,
actualDryRun,
maxTransactionsToSchedule);

coreNames().stream()
.filter(coreName -> requestedCoreName == null || coreName.equals(requestedCoreName))
.filter(coreName -> coreName.equals(requestedCoreName))
.filter(this::isMasterOrStandalone)
.forEach(coreName ->
wrapper.response.add(
coreName,
fixOnSpecificCore(coreName, fromTxCommitTime, toTxCommitTime, dryRun, maxTransactionsToSchedule)));
fixOnSpecificCore(coreName, fromTxCommitTime, toTxCommitTime, actualDryRun, maxTransactionsToSchedule)));

if (wrapper.response.size() > 0)
{
@@ -1671,7 +1759,14 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
ofNullable(toTxCommitTime).ifPresent(value -> wrapper.response.add(TO_TX_COMMIT_TIME_PARAMETER_NAME, value));

wrapper.response.add(MAX_TRANSACTIONS_TO_SCHEDULE_PARAMETER_NAME, maxTransactionsToSchedule);
wrapper.response.add(ACTION_STATUS_LABEL, dryRun ? ACTION_STATUS_NOT_SCHEDULED : ACTION_STATUS_SCHEDULED);
wrapper.response.add(ACTION_STATUS_LABEL, actualDryRun ? ACTION_STATUS_NOT_SCHEDULED : ACTION_STATUS_SCHEDULED);

// the user wanted a real execution (dryRun = false) but the trackers are disabled.
// that adds a message in the response just to inform the user we didn't schedule anything (i.e. we forced a dryRun)
if (!dryRun && actualDryRun)
{
wrapper.response.add(ADDITIONAL_INFO, "Trackers are disabled: a (dryRun = true) has been forced. As consequence of that nothing has been scheduled.");
}
}

return wrapper.response;
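Aside (not part of the diff): the `actualDryRun` flag introduced in this hunk forces a dry run whenever either tracker is missing or disabled, regardless of what the caller requested; the non-short-circuit `|` used there is equivalent to `||` for boolean operands. A minimal restatement of the rule:

    boolean dryRunRequested = false;      // caller asked for a real execution
    boolean metadataTrackerUsable = true; // metadata tracker present and enabled
    boolean aclTrackerUsable = false;     // e.g. ACL tracker disabled
    boolean actualDryRun = dryRunRequested || !metadataTrackerUsable || !aclTrackerUsable;
    // actualDryRun == true: nothing is scheduled and ADDITIONAL_INFO explains the forced dry run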
@@ -1701,6 +1796,8 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
try
{
MetadataTracker metadataTracker = trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class);
AclTracker aclTracker = trackerRegistry.getTrackerForCore(coreName, AclTracker.class);

final IndexHealthReport metadataTrackerIndexHealthReport =
metadataTracker.checkIndex(null, fromTxCommitTime, toTxCommitTime);

@@ -1715,7 +1812,6 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
dryRun,
maxTransactionsToSchedule);

AclTracker aclTracker = trackerRegistry.getTrackerForCore(coreName, AclTracker.class);
final IndexHealthReport aclTrackerIndexHealthReport =
aclTracker.checkIndex(null, fromTxCommitTime, toTxCommitTime);

@@ -2004,16 +2100,19 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
}

/**
* Returns, for the given core, the component which is in charge to publish the core state.
* Returns, for the given core, the tracker which is in charge to check the nodes status.
* Depending on the shard nature, master/standalone or slave, the tracker instance could be different.
* In addition, also the information that a given tracker returns about a given node, could differ (e.g.
* minimal in case of a slave node, detailed for master or standalone nodes).
*
* @param coreName the owning core name.
* @return the component which is in charge to publish the core state.
* @return the component which is in charge to check the nodes status.
*/
AbstractShardInformationPublisher coreStatePublisher(String coreName)
AbstractTracker nodeStatusChecker(String coreName)
{
return ofNullable(trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class))
.map(AbstractShardInformationPublisher.class::cast)
.orElse(trackerRegistry.getTrackerForCore(coreName, NodeStatePublisher.class));
return isMasterOrStandalone(coreName)
? trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class)
: trackerRegistry.getTrackerForCore(coreName, ShardStatePublisher.class);
}

/**
@@ -2036,7 +2135,7 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
private void addAlertMessage(NamedList<Object> report)
{
report.add(
"WARNING",
WARNING,
"The requested endpoint is not available on the slave. " +
"Please re-submit the same request to the corresponding Master");
}
@@ -2084,4 +2183,62 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
.map(Integer::parseInt)
.orElse(Integer.MAX_VALUE)); // Last fallback if we don't have a request param and a value in configuration
}
}
NamedList<Object> disableIndexingOnSpecificCore(String coreName) {
final NamedList<Object> coreResponse = new SimpleOrderedMap<>();
trackerRegistry.getTrackersForCore(coreName)
.stream()
.filter(tracker -> tracker instanceof ActivatableTracker)
.map(ActivatableTracker.class::cast)
.peek(ActivatableTracker::disable)
.forEach(tracker -> coreResponse.add(tracker.getType().toString(), tracker.isEnabled()));
return coreResponse;
}

NamedList<Object> enableIndexingOnSpecificCore(String coreName) {
final NamedList<Object> coreResponse = new SimpleOrderedMap<>();
trackerRegistry.getTrackersForCore(coreName)
.stream()
.filter(tracker -> tracker instanceof ActivatableTracker)
.map(ActivatableTracker.class::cast)
.peek(ActivatableTracker::enable)
.forEach(tracker -> coreResponse.add(tracker.getType().toString(), tracker.isEnabled()));
return coreResponse;
}

/**
* Internal method used for executing the enable/disable indexing/tracking action.
*
* @param params the input request parameters. The only mandatory parameter is the core name
* @param action this can be the "enable" or the "disable" action: it is an "impure" function which takes a core name
* executes the enable/disable logic as part of its side-effect, and returns the action response.
* @return the action response indicating the result of the enable/disable command on a specific core.
* @see #CORE_PARAMETER_NAMES
*/
private NamedList<Object> executeTrackerSubsystemLifecycleAction(SolrParams params, Function<String, NamedList<Object>> action) throws JSONException
{
String requestedCoreName = coreName(params);
final NamedList<Object> response = new SimpleOrderedMap<>();

if (isNotNullAndNotEmpty(requestedCoreName))
{
if (!coreNames().contains(requestedCoreName))
{
response.add(ACTION_ERROR_MESSAGE_LABEL, UNKNOWN_CORE_MESSAGE + requestedCoreName);
return response;
}

if (!isMasterOrStandalone(requestedCoreName)) {
response.add(ACTION_ERROR_MESSAGE_LABEL, UNPROCESSABLE_REQUEST_ON_SLAVE_NODES);
return response;
}
}

coreNames().stream()
.filter(coreName -> requestedCoreName == null || coreName.equals(requestedCoreName))
.filter(this::isMasterOrStandalone)
.forEach(coreName -> response.add(coreName, action.apply(coreName)));

return response;
}
}
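Aside (not part of the diff): actionEnableIndexing and actionDisableIndexing differ only in the per-core Function they hand to executeTrackerSubsystemLifecycleAction, which restricts the targeted cores to master/standalone ones and collects one sub-response per core. A hedged sketch of calling the disable action from code in the same package; the "coreName" parameter name and the org.json JSONException type are assumptions:

    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.util.NamedList;
    import org.json.JSONException;

    // Sketch: pause indexing on one core; each ActivatableTracker reports its resulting enabled flag.
    static NamedList<Object> pauseIndexing(AlfrescoCoreAdminHandler admin, String core) throws JSONException
    {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("coreName", core); // assumed: one of the accepted CORE_PARAMETER_NAMES
        return admin.actionDisableIndexing(params);
    }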
@@ -46,6 +46,7 @@ import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
@@ -63,11 +64,10 @@ import org.alfresco.repo.dictionary.DictionaryDAOImpl;
import org.alfresco.repo.dictionary.Facetable;
import org.alfresco.repo.dictionary.IndexTokenisationMode;
import org.alfresco.repo.dictionary.M2Model;
import org.alfresco.repo.dictionary.M2ModelDiff;
import org.alfresco.repo.dictionary.NamespaceDAO;
import org.alfresco.repo.i18n.StaticMessageLookup;
import org.alfresco.repo.search.MLAnalysisMode;
import org.alfresco.repo.search.adaptor.lucene.QueryConstants;
import org.alfresco.repo.search.adaptor.QueryConstants;
import org.alfresco.repo.search.impl.QueryParserUtils;
import org.alfresco.repo.search.impl.parsers.AlfrescoFunctionEvaluationContext;
import org.alfresco.repo.search.impl.parsers.FTSParser;
@@ -76,17 +76,15 @@ import org.alfresco.repo.search.impl.querymodel.Constraint;
import org.alfresco.repo.search.impl.querymodel.QueryModelFactory;
import org.alfresco.repo.search.impl.querymodel.QueryOptions.Connective;
import org.alfresco.repo.search.impl.querymodel.impl.lucene.LuceneQueryBuilder;
import org.alfresco.repo.search.impl.querymodel.impl.lucene.LuceneQueryBuilderContext;
import org.alfresco.repo.search.impl.querymodel.impl.lucene.LuceneQueryModelFactory;
import org.alfresco.repo.search.impl.querymodel.impl.lucene.QueryBuilderContext;
import org.alfresco.repo.tenant.SingleTServiceImpl;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryException;
import org.alfresco.service.cmr.dictionary.ModelDefinition;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.namespace.NamespaceException;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.solr.AlfrescoClientDataModelServicesFactory.DictionaryKey;
@@ -117,6 +115,8 @@ import org.springframework.context.support.FileSystemXmlApplicationContext;

import static java.util.Optional.ofNullable;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_DAY_FIELD_SUFFIX;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_DAY_OF_WEEK_FIELD_SUFFIX;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_DAY_OF_YEAR_FIELD_SUFFIX;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_HOUR_FIELD_SUFFIX;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_MINUTE_FIELD_SUFFIX;
import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_MONTH_FIELD_SUFFIX;
@@ -130,17 +130,42 @@ import static org.alfresco.solr.SolrInformationServer.UNIT_OF_TIME_YEAR_FIELD_SU
*/
public class AlfrescoSolrDataModel implements QueryConstants
{
public static class ContentPropertySpecs {
public final String fieldName;
public final String locale;

public ContentPropertySpecs(String fieldName, String locale) {
this.fieldName = fieldName;
this.locale = locale;
}
}

public static class TenantDbId
{
public String tenant;
public Long dbId;

private List<ContentPropertySpecs> contentPropertySpecsList;

public Map<String, Object> optionalBag = new HashMap<>();

public void setProperty(String name, Object value)
{
optionalBag.put(name, value);
}

public boolean hasAtLeastOneContentProperty() {
return contentPropertySpecsList != null && !contentPropertySpecsList.isEmpty();
}

public void addContentPropertiesSpecs(List<ContentPropertySpecs> specsList)
{
contentPropertySpecsList = Collections.unmodifiableList(specsList);
}

public Stream<ContentPropertySpecs> contentPropertySpecsStream() {
return contentPropertySpecsList.stream();
}
}
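Aside (not part of the diff): the new TenantDbId members are meant to be used roughly as below; every value here is purely illustrative.

    import java.util.List;

    static void tenantDbIdExample()
    {
        AlfrescoSolrDataModel.TenantDbId ref = new AlfrescoSolrDataModel.TenantDbId();
        ref.tenant = "";                   // default tenant
        ref.dbId = 42L;                    // illustrative node DBID
        ref.setProperty("aclId", 7L);      // arbitrary extra data kept in the optional bag
        ref.addContentPropertiesSpecs(List.of(
                new AlfrescoSolrDataModel.ContentPropertySpecs("some-content-field", "en"))); // illustrative spec
        if (ref.hasAtLeastOneContentProperty())
        {
            ref.contentPropertySpecsStream()
               .forEach(spec -> System.out.println(spec.fieldName + " [" + spec.locale + "]"));
        }
    }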
public enum FieldUse
@@ -171,6 +196,8 @@ public class AlfrescoSolrDataModel implements QueryConstants
UNIT_OF_TIME_MINUTE,
UNIT_OF_TIME_HOUR,
UNIT_OF_TIME_DAY,
UNIT_OF_TIME_DAY_OF_WEEK,
UNIT_OF_TIME_DAY_OF_YEAR,
UNIT_OF_TIME_MONTH,
UNIT_OF_TIME_QUARTER,
UNIT_OF_TIME_YEAR
@@ -184,6 +211,8 @@ public class AlfrescoSolrDataModel implements QueryConstants
UNIT_OF_TIME_QUARTER_FIELD_SUFFIX,
UNIT_OF_TIME_MONTH_FIELD_SUFFIX,
UNIT_OF_TIME_DAY_FIELD_SUFFIX,
UNIT_OF_TIME_DAY_OF_WEEK_FIELD_SUFFIX,
UNIT_OF_TIME_DAY_OF_YEAR_FIELD_SUFFIX,
UNIT_OF_TIME_HOUR_FIELD_SUFFIX,
UNIT_OF_TIME_MINUTE_FIELD_SUFFIX,
UNIT_OF_TIME_SECOND_FIELD_SUFFIX);
@@ -861,8 +890,7 @@ public class AlfrescoSolrDataModel implements QueryConstants
*/
private void addExactSearchFields(PropertyDefinition propertyDefinition, IndexedField indexedField)
{
if ((propertyDefinition.getIndexTokenisationMode() == IndexTokenisationMode.FALSE)
|| !(propertyDefinition.getIndexTokenisationMode() == IndexTokenisationMode.BOTH))
if ((propertyDefinition.getIndexTokenisationMode() == IndexTokenisationMode.FALSE))
{

indexedField.addField(getFieldForText(true, false, false, propertyDefinition), true, false);
@@ -870,11 +898,16 @@ public class AlfrescoSolrDataModel implements QueryConstants
}
else
{
if(crossLocaleSearchDataTypes.contains(propertyDefinition.getDataType().getName()) || crossLocaleSearchProperties.contains(propertyDefinition.getName()))
if (crossLocaleSearchDataTypes.contains(propertyDefinition.getDataType().getName())
|| crossLocaleSearchProperties.contains(propertyDefinition.getName()))
{
indexedField.addField(getFieldForText(false, true, false, propertyDefinition), false, false);
} else{
throw new UnsupportedOperationException("Exact Term search is not supported unless you configure the field <"+propertyDefinition.getName()+"> for cross locale search");
}
else
{
throw new UnsupportedOperationException(
"Exact Term search is not supported unless you configure the field <"
+ propertyDefinition.getName() + "> for cross locale search");
}
}
}
@@ -1133,6 +1166,10 @@ public class AlfrescoSolrDataModel implements QueryConstants
return UNIT_OF_TIME_HOUR_FIELD_SUFFIX;
case UNIT_OF_TIME_DAY:
return UNIT_OF_TIME_DAY_FIELD_SUFFIX;
case UNIT_OF_TIME_DAY_OF_WEEK:
return UNIT_OF_TIME_DAY_OF_WEEK_FIELD_SUFFIX;
case UNIT_OF_TIME_DAY_OF_YEAR:
return UNIT_OF_TIME_DAY_OF_YEAR_FIELD_SUFFIX;
case UNIT_OF_TIME_MONTH:
return UNIT_OF_TIME_MONTH_FIELD_SUFFIX;
case UNIT_OF_TIME_QUARTER:
@@ -1353,22 +1390,9 @@ public class AlfrescoSolrDataModel implements QueryConstants

public boolean putModel(M2Model model)
{
Set<String> errors = validateModel(model);
if(errors.isEmpty())
{
modelErrors.remove(model.getName());
dictionaryDAO.putModelIgnoringConstraints(model);
return true;
}
else
{
if(!modelErrors.containsKey(model.getName()))
{
modelErrors.put(model.getName(), errors);
log.warn(errors.iterator().next());
}
return false;
}
modelErrors.remove(model.getName());
dictionaryDAO.putModelIgnoringConstraints(model);
return true;
}
public void removeModel(QName modelQName)
@@ -1540,7 +1564,7 @@ public class AlfrescoSolrDataModel implements QueryConstants

Set<String> selectorGroup = queryModelQuery.getSource().getSelectorGroups(functionContext).get(0);

LuceneQueryBuilderContext<Query, Sort, ParseException> luceneContext = getLuceneQueryBuilderContext(searchParameters, req, alternativeDictionary, FTSQueryParser.RerankPhase.SINGLE_PASS);
QueryBuilderContext<Query, Sort, ParseException> luceneContext = getLuceneQueryBuilderContext(searchParameters, req, alternativeDictionary, FTSQueryParser.RerankPhase.SINGLE_PASS);
@SuppressWarnings("unchecked")
LuceneQueryBuilder<Query, Sort, ParseException> builder = (LuceneQueryBuilder<Query, Sort, ParseException>) queryModelQuery;
org.apache.lucene.search.Query luceneQuery = builder.buildQuery(selectorGroup, luceneContext, functionContext);
@@ -1548,7 +1572,7 @@ public class AlfrescoSolrDataModel implements QueryConstants
return new ContextAwareQuery(luceneQuery, Boolean.TRUE.equals(isFilter) ? null : searchParameters);
}

public LuceneQueryBuilderContext<Query, Sort, ParseException> getLuceneQueryBuilderContext(SearchParameters searchParameters, SolrQueryRequest req, String alternativeDictionary, FTSQueryParser.RerankPhase rerankPhase)
public QueryBuilderContext<Query, Sort, ParseException> getLuceneQueryBuilderContext(SearchParameters searchParameters, SolrQueryRequest req, String alternativeDictionary, FTSQueryParser.RerankPhase rerankPhase)
{
return new Lucene4QueryBuilderContextSolrImpl(
getDictionaryService(alternativeDictionary),
@@ -1606,7 +1630,7 @@ public class AlfrescoSolrDataModel implements QueryConstants
@SuppressWarnings("unchecked")
LuceneQueryBuilder<Query, Sort, ParseException> builder = (LuceneQueryBuilder<Query, Sort, ParseException>) queryModelQuery;

LuceneQueryBuilderContext<Query, Sort, ParseException> luceneContext = getLuceneQueryBuilderContext(searchParameters, req, CMISStrictDictionaryService.DEFAULT, rerankPhase);
QueryBuilderContext<Query, Sort, ParseException> luceneContext = getLuceneQueryBuilderContext(searchParameters, req, CMISStrictDictionaryService.DEFAULT, rerankPhase);

Set<String> selectorGroup = null;
if (queryModelQuery.getSource() != null)
@@ -1795,6 +1819,10 @@ public class AlfrescoSolrDataModel implements QueryConstants
return SpecializedFieldType.UNIT_OF_TIME_HOUR;
case UNIT_OF_TIME_DAY_FIELD_SUFFIX:
return SpecializedFieldType.UNIT_OF_TIME_DAY;
case UNIT_OF_TIME_DAY_OF_WEEK_FIELD_SUFFIX:
return SpecializedFieldType.UNIT_OF_TIME_DAY_OF_WEEK;
case UNIT_OF_TIME_DAY_OF_YEAR_FIELD_SUFFIX:
return SpecializedFieldType.UNIT_OF_TIME_DAY_OF_YEAR;
case UNIT_OF_TIME_MONTH_FIELD_SUFFIX:
return SpecializedFieldType.UNIT_OF_TIME_MONTH;
case UNIT_OF_TIME_QUARTER_FIELD_SUFFIX:
@@ -1888,27 +1916,4 @@ public class AlfrescoSolrDataModel implements QueryConstants
return solrSortField;
}

private Set<String> validateModel(M2Model model)
{
try
{
dictionaryDAO.getCompiledModel(QName.createQName(model.getName(), namespaceDAO));
}
catch (DictionaryException | NamespaceException exception)
{
// No model to diff
return Collections.emptySet();
}

// namespace unknown - no model
List<M2ModelDiff> modelDiffs = dictionaryDAO.diffModelIgnoringConstraints(model);
return modelDiffs.stream()
.filter(diff -> diff.getDiffType().equals(M2ModelDiff.DIFF_UPDATED))
.map(diff ->
String.format("Model not updated: %s Failed to validate model update - found non-incrementally updated %s '%s'",
model.getName(),
diff.getElementType(),
diff.getElementName()))
.collect(Collectors.toSet());
}
}
}
@@ -28,7 +28,7 @@ package org.alfresco.solr;

import java.util.Collection;

import org.alfresco.repo.search.adaptor.lucene.QueryConstants;
import org.alfresco.repo.search.adaptor.QueryConstants;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
@@ -105,34 +105,24 @@ class HandlerReportHelper
}
}

static NamedList<Object> buildNodeReport(MetadataTracker tracker, Node node) throws JSONException
static NamedList<Object> buildNodeReport(AbstractTracker tracker, Node node) throws JSONException
{
NodeReport nodeReport = tracker.checkNode(node);

NamedList<Object> nr = new SimpleOrderedMap<>();
nr.add("Node DBID", nodeReport.getDbid());
nr.add("DB TX", nodeReport.getDbTx());
nr.add("DB TX status", nodeReport.getDbNodeStatus().toString());
if (nodeReport.getIndexLeafDoc() != null)
{
nr.add("Leaf tx in Index", nodeReport.getIndexLeafTx());
}
if (nodeReport.getIndexAuxDoc() != null)
{
nr.add("Aux tx in Index", nodeReport.getIndexAuxTx());
}
nr.add("Indexed Node Doc Count", nodeReport.getIndexedNodeDocCount());
return nr;
return buildNodeReport(tracker, node.getId());
}

static NamedList<Object> buildNodeReport(AbstractShardInformationPublisher publisher, Long dbid) throws JSONException
static NamedList<Object> buildNodeReport(AbstractTracker tracker, long dbid) throws JSONException
{
NodeReport nodeReport = publisher.checkNode(dbid);
NodeReport nodeReport = tracker.checkNode(dbid);

NamedList<Object> payload = new SimpleOrderedMap<>();
payload.add("Node DBID", nodeReport.getDbid());

if (publisher.isOnMasterOrStandalone())
boolean isOnMasterOrStandaloneMode =
tracker instanceof MetadataTracker
|| (tracker instanceof ShardStatePublisher
&& ((ShardStatePublisher)tracker).isOnMasterOrStandalone());

if (isOnMasterOrStandaloneMode)
{
ofNullable(nodeReport.getDbTx()).ifPresent(value -> payload.add("DB TX", value));
ofNullable(nodeReport.getDbNodeStatus()).map(Object::toString).ifPresent(value -> payload.add("DB TX Status", value));
@@ -159,6 +149,7 @@ class HandlerReportHelper
AclTracker aclTracker = trackerRegistry.getTrackerForCore(coreName, AclTracker.class);
IndexHealthReport aclReport = aclTracker.checkIndex(toAclTx, fromTime, toTime);
NamedList<Object> ihr = new SimpleOrderedMap<>();
ihr.add("ACL Tracker", (aclTracker.isEnabled() ? "enabled" : "disabled"));
ihr.add("DB acl transaction count", aclReport.getDbAclTransactionCount());
ihr.add("Count of duplicated acl transactions in the index", aclReport.getDuplicatedAclTxInIndex()
.cardinality());
@@ -188,6 +179,7 @@ class HandlerReportHelper
// Metadata
MetadataTracker metadataTracker = trackerRegistry.getTrackerForCore(coreName, MetadataTracker.class);
IndexHealthReport metaReport = metadataTracker.checkIndex(toTx, fromTime, toTime);
ihr.add("Metadata Tracker", (metadataTracker.isEnabled() ? "enabled" : "disabled"));
ihr.add("DB transaction count", metaReport.getDbTransactionCount());
ihr.add("Count of duplicated transactions in the index", metaReport.getDuplicatedTxInIndex()
.cardinality());
@@ -248,7 +240,7 @@ class HandlerReportHelper
NamedList<Object> coreSummary = new SimpleOrderedMap<>();
coreSummary.addAll((SimpleOrderedMap<Object>) srv.getCoreStats());

NodeStatePublisher statePublisher = trackerRegistry.getTrackerForCore(cname, NodeStatePublisher.class);
ShardStatePublisher statePublisher = trackerRegistry.getTrackerForCore(cname, ShardStatePublisher.class);
TrackerState trackerState = statePublisher.getTrackerState();
long lastIndexTxCommitTime = trackerState.getLastIndexedTxCommitTime();

@@ -429,17 +421,12 @@ class HandlerReportHelper
long remainingContentTimeMillis = 0;
srv.addContentOutdatedAndUpdatedCounts(ftsSummary);
long cleanCount =
ofNullable(ftsSummary.get("Node count with FTSStatus Clean"))
ofNullable(ftsSummary.get("Node count whose content is in sync"))
.map(Number.class::cast)
.map(Number::longValue)
.orElse(0L);
long dirtyCount =
ofNullable(ftsSummary.get("Node count with FTSStatus Dirty"))
.map(Number.class::cast)
.map(Number::longValue)
.orElse(0L);
long newCount =
ofNullable(ftsSummary.get("Node count with FTSStatus New"))
ofNullable(ftsSummary.get("Node count whose content needs to be updated"))
.map(Number.class::cast)
.map(Number::longValue)
.orElse(0L);
@@ -450,12 +437,14 @@ class HandlerReportHelper
.map(Number::longValue)
.orElse(0L);

long contentYetToSee = nodesInIndex > 0 ? nodesToDo * (cleanCount + dirtyCount + newCount)/nodesInIndex : 0;
if (dirtyCount + newCount + contentYetToSee > 0)

long contentYetToSee = nodesInIndex > 0 ? nodesToDo * (cleanCount + dirtyCount)/nodesInIndex : 0;
if (dirtyCount + contentYetToSee > 0)
{
// We now use the elapsed time as seen by the single thread farming out alc indexing
double meanContentElapsedIndexTime = srv.getTrackerStats().getMeanContentElapsedIndexTime();
remainingContentTimeMillis = (long) ((dirtyCount + newCount + contentYetToSee) * meanContentElapsedIndexTime);
remainingContentTimeMillis = (long) ((dirtyCount + contentYetToSee) * meanContentElapsedIndexTime);
}
now = new Date();
end = new Date(now.getTime() + remainingContentTimeMillis);
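Aside (not part of the diff): after this change, nodes with FTSStatus New no longer feed the remaining-time estimate; only the "needs to be updated" count and the extrapolated not-yet-seen content do. A worked example with made-up numbers:

    long nodesToDo = 10_000, nodesInIndex = 100_000;
    long cleanCount = 80_000, dirtyCount = 5_000;  // "in sync" / "needs to be updated" counts
    long contentYetToSee = nodesInIndex > 0 ? nodesToDo * (cleanCount + dirtyCount) / nodesInIndex : 0; // 8_500
    double meanContentElapsedIndexTime = 12.5;     // milliseconds per node, from TrackerStats
    long remainingContentTimeMillis = (long) ((dirtyCount + contentYetToSee) * meanContentElapsedIndexTime); // 168_750 ms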
@@ -485,6 +474,8 @@ class HandlerReportHelper
}

ContentTracker contentTrkr = trackerRegistry.getTrackerForCore(cname, ContentTracker.class);
CascadeTracker cascadeTracker = trackerRegistry.getTrackerForCore(cname, CascadeTracker.class);

TrackerState contentTrkrState = contentTrkr.getTrackerState();
// Leave ModelTracker out of this check, because it is common
boolean aTrackerIsRunning = aclTrkrState.isRunning() || metadataTrkrState.isRunning()
@@ -498,6 +489,14 @@ class HandlerReportHelper
coreSummary.add("MetadataTracker Active", metadataTrkrState.isRunning());
coreSummary.add("AclTracker Active", aclTrkrState.isRunning());

coreSummary.add("ContentTracker Enabled", contentTrkr.isEnabled());
coreSummary.add("MetadataTracker Enabled", metaTrkr.isEnabled());
coreSummary.add("AclTracker Enabled", aclTrkr.isEnabled());
if (cascadeTracker != null)
{
coreSummary.add("CascadeTracker Enabled", cascadeTracker.isEnabled());
}

// TX

coreSummary.add("Last Index TX Commit Time", lastIndexTxCommitTime);
@@ -0,0 +1,57 @@
/*
* #%L
* Alfresco Search Services
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/

package org.alfresco.solr;

import org.alfresco.solr.io.interceptor.SharedSecretRequestInterceptor;
import org.alfresco.solr.security.SecretSharedPropertyCollector;
import org.apache.http.HttpRequestInterceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InterceptorRegistry
{
protected static final Logger LOGGER = LoggerFactory.getLogger(InterceptorRegistry.class);
/**
* Register the required {@link HttpRequestInterceptor}s
*/
public static void registerSolrClientInterceptors()
{
try
{
if (SecretSharedPropertyCollector.isCommsSecretShared())
{
SharedSecretRequestInterceptor.register();
}
}
catch (Throwable t)
{
LOGGER.warn("It was not possible to add the Shared Secret Authentication interceptor. "
+ "Please make sure to pass the required -Dalfresco.secureComms=secret and "
+ "-Dalfresco.secureComms.secret=my-secret-value JVM args if trying to use Secret Authentication with Solr.");
}
}
}
File diff suppressed because it is too large
@@ -26,7 +26,7 @@

package org.alfresco.solr.component;

import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_LID;
import static org.alfresco.repo.search.adaptor.QueryConstants.FIELD_LID;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
@@ -50,7 +50,7 @@ import static java.util.Optional.ofNullable;
* Transform the fieldlist depending on the use of cached transformer:
* [cached] -> add to the field list the translations of the fiels to the internal schema notation
* otherwise -> modify the field list in order to contains a subset of the following fields:
* id, DBID, _version_ and score
* id, DBID, and score
*/
public class RewriteFieldListComponent extends SearchComponent {

@@ -63,7 +63,7 @@ public class RewriteFieldListComponent extends SearchComponent {
{
Set<String> fieldListSet = new HashSet<>();

Set<String> defaultNonCachedFields = Set.of("id","DBID", "_version_");
Set<String> defaultNonCachedFields = Set.of("id","DBID");
Set<String> allowedNonCachedFields = new HashSet<>(defaultNonCachedFields);
allowedNonCachedFields.add("score");

@@ -79,7 +79,7 @@ public class RewriteFieldListComponent extends SearchComponent {

// In case cache transformer is no set, we need to modify the field list in order return
// only id, DBID and _version_ fields
// only id, DBID fields
if (!cacheTransformer){
if (!solrReturnFields.wantsAllFields())
{
@@ -102,7 +102,7 @@ public class RewriteFieldListComponent extends SearchComponent {
{
fieldListSet.add("*");
}
else
else if (solrReturnFields.getLuceneFieldNames() != null)
{
fieldListSet.addAll(solrReturnFields.getLuceneFieldNames().stream()
.map( field -> AlfrescoSolrDataModel.getInstance()
@@ -0,0 +1,98 @@
/*
* #%L
* Alfresco Search Services
* %%
* Copyright (C) 2005 - 2022 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/

package org.alfresco.solr.io.interceptor;

import java.io.IOException;

import org.alfresco.solr.security.SecretSharedPropertyCollector;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.message.BasicHeader;
import org.apache.http.protocol.HttpContext;
import org.apache.solr.client.solrj.impl.HttpClientUtil;

/**
* This HttpRequestInterceptor adds the header that is required for Shared Secret Authentication with Solr
*
* @author Domenico Sibilio
*/
public class SharedSecretRequestInterceptor implements HttpRequestInterceptor
{

private static volatile SharedSecretRequestInterceptor INSTANCE;

private SharedSecretRequestInterceptor()
{
}

/**
* A typical thread-safe singleton implementation
* @return The unique instance of this class
*/
public static SharedSecretRequestInterceptor getInstance()
{
if (INSTANCE == null)
{
synchronized (SharedSecretRequestInterceptor.class)
{
if (INSTANCE == null)
{
INSTANCE = new SharedSecretRequestInterceptor();
}
}
}

return INSTANCE;
}

/**
* Decorates the enclosing request with the Shared Secret Authentication header
* @param httpRequest
* @param httpContext
* @throws HttpException
* @throws IOException
*/
@Override
public void process(HttpRequest httpRequest, HttpContext httpContext)
throws HttpException, IOException
{
String secretName = SecretSharedPropertyCollector.getSecretHeader();
String secretValue = SecretSharedPropertyCollector.getSecret();
httpRequest.addHeader(new BasicHeader(secretName, secretValue));
}

/**
* Utility method to register the unique instance of this {@link HttpRequestInterceptor}
*/
public static void register()
{
HttpClientUtil.removeRequestInterceptor(getInstance());
HttpClientUtil.addRequestInterceptor(getInstance());
}

}
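Aside (not part of the diff): register() removes and re-adds the singleton on SolrJ's HttpClientUtil, so repeated registrations stay idempotent and every HttpClient subsequently built through HttpClientUtil sends the shared-secret header. A hedged usage sketch, assuming the alfresco.secureComms system properties are set so SecretSharedPropertyCollector can resolve the header name and secret:

    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.solr.client.solrj.impl.HttpClientUtil;
    import org.apache.solr.common.params.ModifiableSolrParams;

    static CloseableHttpClient sharedSecretAwareClient()
    {
        SharedSecretRequestInterceptor.register();                      // idempotent: remove, then add
        return HttpClientUtil.createClient(new ModifiableSolrParams()); // interceptor runs on every outgoing request
    }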
@@ -45,13 +45,14 @@ import org.alfresco.solr.SolrInformationServer;
import org.alfresco.solr.SolrKeyResourceLoader;
import org.alfresco.solr.client.SOLRAPIClient;
import org.alfresco.solr.client.SOLRAPIClientFactory;
import org.alfresco.solr.security.SecretSharedPropertyCollector;
import org.alfresco.solr.tracker.AclTracker;
import org.alfresco.solr.tracker.CascadeTracker;
import org.alfresco.solr.tracker.CommitTracker;
import org.alfresco.solr.tracker.ContentTracker;
import org.alfresco.solr.tracker.MetadataTracker;
import org.alfresco.solr.tracker.ModelTracker;
import org.alfresco.solr.tracker.NodeStatePublisher;
import org.alfresco.solr.tracker.ShardStatePublisher;
import org.alfresco.solr.tracker.SolrTrackerScheduler;
import org.alfresco.solr.tracker.Tracker;
import org.alfresco.solr.tracker.TrackerRegistry;
@@ -113,6 +114,9 @@ public class SolrCoreLoadListener extends AbstractSolrEventListener

TrackerRegistry trackerRegistry = admin.getTrackerRegistry();
Properties coreProperties = new CoreDescriptorDecorator(core.getCoreDescriptor()).getProperties();

// Add secret shared properties if required, as they are passed as Java Environment Variables
coreProperties = SecretSharedPropertyCollector.completeCoreProperties(coreProperties);

SolrResourceLoader loader = core.getLatestSchema().getResourceLoader();
SolrKeyResourceLoader keyResourceLoader = new SolrKeyResourceLoader(loader);
@@ -190,7 +194,7 @@ public class SolrCoreLoadListener extends AbstractSolrEventListener
{
LOGGER.info("SearchServices Core Trackers have been explicitly disabled on core \"{}\" through \"enable.alfresco.tracking\" configuration property.", core.getName());

NodeStatePublisher statePublisher = new NodeStatePublisher(false, coreProperties, repositoryClient, core.getName(), informationServer);
ShardStatePublisher statePublisher = new ShardStatePublisher(false, coreProperties, repositoryClient, core.getName(), informationServer);
trackerRegistry.register(core.getName(), statePublisher);
scheduler.schedule(statePublisher, core.getName(), coreProperties);
trackers.add(statePublisher);
@@ -205,7 +209,7 @@ public class SolrCoreLoadListener extends AbstractSolrEventListener
{
LOGGER.info("SearchServices Core Trackers have been disabled on core \"{}\" because it is a slave core.", core.getName());

NodeStatePublisher statePublisher = new NodeStatePublisher(false, coreProperties, repositoryClient, core.getName(), informationServer);
ShardStatePublisher statePublisher = new ShardStatePublisher(false, coreProperties, repositoryClient, core.getName(), informationServer);
trackerRegistry.register(core.getName(), statePublisher);
scheduler.schedule(statePublisher, core.getName(), coreProperties);
trackers.add(statePublisher);
@@ -264,9 +268,9 @@ public class SolrCoreLoadListener extends AbstractSolrEventListener
trackerRegistry,
scheduler);

NodeStatePublisher coreStateTracker =
ShardStatePublisher coreStateTracker =
registerAndSchedule(
new NodeStatePublisher(true, props, repositoryClient, core.getName(), srv),
new ShardStatePublisher(true, props, repositoryClient, core.getName(), srv),
core,
props,
trackerRegistry,
@@ -288,6 +292,7 @@ public class SolrCoreLoadListener extends AbstractSolrEventListener
trackers.add(cascadeTracker);
}

//The CommitTracker will acquire these locks in order
//The ContentTracker will likely have the longest runs so put it first to ensure the MetadataTracker is not paused while
//waiting for the ContentTracker to release it's lock.
@@ -30,7 +30,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.alfresco.repo.search.adaptor.lucene.QueryConstants;
import org.alfresco.repo.search.adaptor.QueryConstants;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
@@ -40,7 +40,7 @@ import java.util.Locale;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.search.MLAnalysisMode;
import org.alfresco.repo.search.adaptor.lucene.QueryConstants;
import org.alfresco.repo.search.adaptor.QueryConstants;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.search.QueryConsistency;
import org.alfresco.service.cmr.search.SearchParameters;
@@ -29,9 +29,9 @@ package org.alfresco.solr.query;
import java.util.Properties;

import org.alfresco.repo.search.MLAnalysisMode;
import org.alfresco.repo.search.adaptor.lucene.LuceneQueryParserAdaptor;
import org.alfresco.repo.search.adaptor.QueryParserAdaptor;
import org.alfresco.repo.search.impl.parsers.FTSQueryParser;
import org.alfresco.repo.search.impl.querymodel.impl.lucene.LuceneQueryBuilderContext;
import org.alfresco.repo.search.impl.querymodel.impl.lucene.QueryBuilderContext;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.search.SearchParameters;
@@ -47,13 +47,13 @@ import org.apache.solr.request.SolrQueryRequest;
/**
* @author andyh
*/
public class Lucene4QueryBuilderContextSolrImpl implements LuceneQueryBuilderContext<Query, Sort, ParseException>
public class Lucene4QueryBuilderContextSolrImpl implements QueryBuilderContext<Query, Sort, ParseException>
{
private Solr4QueryParser lqp;

private NamespacePrefixResolver namespacePrefixResolver;

private LuceneQueryParserAdaptor<Query, Sort, ParseException> lqpa;
private QueryParserAdaptor<Query, Sort, ParseException> lqpa;

/**
* Context for building lucene queries
@@ -90,7 +90,7 @@ public class Lucene4QueryBuilderContextSolrImpl implements LuceneQueryBuilderCon
/* (non-Javadoc)
* @see org.alfresco.repo.search.impl.querymodel.impl.lucene.LuceneQueryBuilderContext#getLuceneQueryParser()
*/
public LuceneQueryParserAdaptor<Query, Sort, ParseException> getLuceneQueryParserAdaptor()
public QueryParserAdaptor<Query, Sort, ParseException> getLuceneQueryParserAdaptor()
{
return lqpa;
}
@@ -29,11 +29,11 @@ package org.alfresco.solr.query;
import java.util.List;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.search.adaptor.lucene.AnalysisMode;
import org.alfresco.repo.search.adaptor.lucene.LuceneFunction;
import org.alfresco.repo.search.adaptor.lucene.LuceneQueryParserAdaptor;
import org.alfresco.repo.search.adaptor.lucene.LuceneQueryParserExpressionAdaptor;
import org.alfresco.repo.search.adaptor.lucene.QueryConstants;
import org.alfresco.repo.search.adaptor.QueryParserAdaptor;
import org.alfresco.repo.search.adaptor.QueryParserExpressionAdaptor;
import org.alfresco.repo.search.adaptor.AnalysisMode;
import org.alfresco.repo.search.adaptor.LuceneFunction;
import org.alfresco.repo.search.adaptor.QueryConstants;
import org.alfresco.repo.search.impl.querymodel.FunctionEvaluationContext;
import org.alfresco.repo.search.impl.querymodel.Ordering;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
@@ -53,7 +53,7 @@ import org.apache.lucene.search.TermQuery;
* @author Andy
*
*/
public class Lucene4QueryParserAdaptor implements LuceneQueryParserAdaptor<Query, Sort, ParseException>
public class Lucene4QueryParserAdaptor implements QueryParserAdaptor<Query, Sort, ParseException>
{

private Solr4QueryParser lqp;
@@ -274,7 +274,7 @@ public class Lucene4QueryParserAdaptor implements LuceneQueryParserAdaptor<Query
@Override
public Query getNegatedQuery(Query query) throws ParseException
{
LuceneQueryParserExpressionAdaptor<Query, ParseException> expressionAdaptor = getExpressionAdaptor();
QueryParserExpressionAdaptor<Query, ParseException> expressionAdaptor = getExpressionAdaptor();
expressionAdaptor.addRequired(getMatchAllQuery());
expressionAdaptor.addExcluded(query);
return expressionAdaptor.getQuery();
@@ -284,7 +284,7 @@ public class Lucene4QueryParserAdaptor implements LuceneQueryParserAdaptor<Query
* @see org.alfresco.repo.search.adaptor.lucene.LuceneQueryParserAdaptor#getExpressionAdaptor()
*/
@Override
public LuceneQueryParserExpressionAdaptor<Query, ParseException> getExpressionAdaptor()
public QueryParserExpressionAdaptor<Query, ParseException> getExpressionAdaptor()
{
return new Lucene4QueryParserExpressionAdaptor();
}
@@ -307,7 +307,7 @@ public class Lucene4QueryParserAdaptor implements LuceneQueryParserAdaptor<Query
throw new UnsupportedOperationException();
}

private class Lucene4QueryParserExpressionAdaptor implements LuceneQueryParserExpressionAdaptor<Query, ParseException>
private class Lucene4QueryParserExpressionAdaptor implements QueryParserExpressionAdaptor<Query, ParseException>
{
BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
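Aside (not part of the diff): getNegatedQuery above expresses NOT q as a boolean query that requires a match-all clause and excludes q; the same idiom written directly against the Lucene API:

    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Query;

    static Query negate(Query toExclude)
    {
        return new BooleanQuery.Builder()
                .add(new MatchAllDocsQuery(), Occur.MUST)  // required: match everything
                .add(toExclude, Occur.MUST_NOT)            // excluded: the operand being negated
                .build();
    }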
Some files were not shown because too many files have changed in this diff