Merge branch 'master' of git.alfresco.com:search_discovery/insightengine into fix/SEARCH-1994_windows_concurrency_issue

eliaporciani
2019-12-18 10:08:58 +01:00
14 changed files with 288 additions and 237 deletions

View File

@@ -40,4 +40,4 @@ More details are available at [search-services](/search-services) folder.
**Following resources will not be available for Community users**
More details are available at [insight-engine](/insight-engine) folder.
More details are available at [insight-engine](/insight-engine) folder.

View File

@@ -11,9 +11,9 @@
<name>Search Analytics E2E Tests</name>
<description>Test Project to test Search Service and Analytics Features on a complete setup of Alfresco, Share</description>
<properties>
<tas.rest.api.version>1.20</tas.rest.api.version>
<tas.rest.api.version>1.25</tas.rest.api.version>
<tas.cmis.api.version>1.11</tas.cmis.api.version>
<tas.utility.version>3.0.16</tas.utility.version>
<tas.utility.version>3.0.17</tas.utility.version>
<rm.version>3.2.0</rm.version>
<suiteXmlFile>src/test/resources/SearchSuite.xml</suiteXmlFile>
<test.exclude></test.exclude>

View File

@@ -338,6 +338,22 @@ This Docker Image is available at Alfresco Docker Hub:
To use the public image instead of the local one (`searchservices:develop`) just use `alfresco/alfresco-search-services:1.3.x.x` labels.
## Docker Master-Slave setup
### Enable Search Slave Replica config
To enable a slave node, set the environment variable `REPLICATION_TYPE=slave`; by default the master configuration is enabled and the slave configuration is disabled.
Whenever the Search Services or Insight Engine image starts at deployment time, it executes the script [search_config_setup.sh](/packaging/src/docker), which applies the master or slave configuration according to the value of this variable.
To run the docker image:
```bash
$ docker run -p 8984:8983 -e REPLICATION_TYPE=slave -e ALFRESCO_SECURE_COMMS=none -e SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive searchservices:develop
```
Solr slave endpoint: [http://localhost:8984/solr](http://localhost:8984/solr)
To generate your own Docker-compose file please follow [generator-alfresco-docker-compose](../e2e-test/generator-alfresco-docker-compose/README.md)
### Use Alfresco Search Services Docker Image with Docker Compose
Sample configuration in a Docker Compose file using **Plain HTTP** protocol to communicate with Alfresco Repository.

View File

@@ -158,7 +158,7 @@
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>3.2.0</version>
<version>3.2.4</version>
<scope>test</scope>
</dependency>

View File

@@ -43,6 +43,7 @@ import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
@@ -257,7 +258,9 @@ public final class SolrContentStore implements Closeable, AccessMode
wr.write(Long.toString(version));
wr.close();
tmpFile.renameTo(new File(root, ".version"));
// File.renameTo(..) does not work on Windows. Use Files.move instead.
Files.move(tmpFile.toPath(), new File(root, ".version").toPath(), StandardCopyOption.REPLACE_EXISTING);
}
catch (IOException exception)
{
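For context, a minimal standalone sketch of the pattern adopted here (the class, method and temporary file names are illustrative, not the actual SolrContentStore code): `File.renameTo(..)` can silently fail on Windows when the destination already exists, while `Files.move` with `REPLACE_EXISTING` replaces the target and reports problems as exceptions.

```java
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class VersionFileWriter
{
    /**
     * Writes the version to a temporary file and then moves it over root/.version.
     * Files.move(.., REPLACE_EXISTING) also works on Windows, where File.renameTo(..)
     * returns false instead of replacing an existing destination file.
     */
    public static void writeVersion(File root, long version) throws IOException
    {
        Path tmpFile = new File(root, ".version.tmp").toPath();
        Files.write(tmpFile, Long.toString(version).getBytes());

        Path versionFile = new File(root, ".version").toPath();
        Files.move(tmpFile, versionFile, StandardCopyOption.REPLACE_EXISTING);
    }
}
```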

View File

@@ -37,6 +37,7 @@ package org.alfresco.solr.handler;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.alfresco.solr.content.SolrContentStore;
import org.apache.commons.io.FilenameUtils;
import org.apache.http.client.HttpClient;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.IndexCommit;
@@ -188,6 +189,13 @@ class AlfrescoIndexFetcher
private final Map<String, FileInfo> confFileInfoCache = new HashMap<>();
private volatile Date replicationStartTimeStamp;
private RTimer replicationTimer;
/**
* Each Map<String, Object> contains the following fields:
* NAME : String -> file name (with path for contentstore files)
* SIZE : long -> file size
* CHECKSUM : long -> checksum
*/
private volatile List<Map<String, Object>> filesToDownload;
private volatile List<Map<String, Object>> confFilesToDownload;
private volatile List<Map<String, Object>> tlogFilesToDownload;
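Purely as an illustration of the descriptor shape documented above (the literal keys and values below are assumptions made for this example, not taken from the replication protocol):

```java
import java.util.Map;

public class FileInfoExample
{
    public static void main(String[] args)
    {
        // A hypothetical descriptor shaped like the entries described in the javadoc above.
        Map<String, Object> fileInfo = Map.of(
                "name", "contentstore/_DEFAULT_/some-doc.gz",
                "size", 1024L,
                "checksum", 123456789L);

        String name = (String) fileInfo.get("name");
        long size = (Long) fileInfo.get("size");
        long checksum = (Long) fileInfo.get("checksum");
        System.out.println(name + " (" + size + " bytes, checksum " + checksum + ")");
    }
}
```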
@@ -197,6 +205,7 @@ class AlfrescoIndexFetcher
private volatile List<Map<String, Object>> confFilesDownloaded;
private volatile List<Map<String, Object>> tlogFilesDownloaded;
private volatile List<Map<String, Object>> contentStoreFilesDownloaded;
private volatile Map<String, Object> currentFile;
private volatile DirectoryFileFetcher dirFileFetcher;
private volatile LocalFsFileFetcher localFileFetcher;
@@ -1738,13 +1747,20 @@ class AlfrescoIndexFetcher
private void cleanUpContentStore(String contentStorePath) throws Exception
{
AtomicInteger fileDeleted = new AtomicInteger();
Set<String> fileNames = contentStoreFilesToDownload.stream().map(e -> (String) e.get(NAME))
// This is the set of the ONLY files that should be in contentStore.
// This set is computed from the information received from the master. After a full replication, only the files
// that have been downloaded from master (contentStoreFilesToDownload) should be in contentStore.
// The file paths are translated into the current OS path notation.
Set<String> contentStoreFiles = contentStoreFilesToDownload.stream()
.map(e -> (String) e.get(NAME))
.map(FilenameUtils::separatorsToSystem)
.collect(Collectors.toSet());
try
{
Files.walk(Paths.get(contentStorePath)).forEach(p -> {
File f = new File(p.toUri());
if (!f.isDirectory() && !fileNames.contains(p.toString().replace(contentStorePath, "")))
if (!f.isDirectory() && !contentStoreFiles.contains(p.toString().replace(contentStorePath, "")))
{
try
{
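A minimal, self-contained sketch of the cleanup idea applied in this method, assuming (as the comment above states) that paths received from the master use `/` separators; the class and method names are illustrative, not the actual AlfrescoIndexFetcher code:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.commons.io.FilenameUtils;

public class ContentStoreCleanup
{
    /**
     * Deletes every regular file under contentStorePath whose relative path is not in
     * expectedFiles. The expected paths are normalised to the current OS separator with
     * FilenameUtils.separatorsToSystem before the comparison.
     */
    public static void cleanUp(String contentStorePath, Set<String> expectedFiles) throws IOException
    {
        Set<String> normalised = expectedFiles.stream()
                .map(FilenameUtils::separatorsToSystem)
                .collect(Collectors.toSet());

        try (Stream<Path> paths = Files.walk(Paths.get(contentStorePath)))
        {
            paths.filter(Files::isRegularFile)
                 .filter(p -> !normalised.contains(p.toString().replace(contentStorePath, "")))
                 .forEach(p -> {
                     try
                     {
                         Files.delete(p);
                     }
                     catch (IOException e)
                     {
                         // In this sketch, files that cannot be deleted are simply skipped.
                     }
                 });
        }
    }
}
```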

View File

@@ -33,7 +33,6 @@ import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -83,8 +82,9 @@ import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_VERSI
* class hierarchy. Ideally this function should be retired in favour of better
* annotations.
*
* @since solr 1.5
* @author Michael Suzuki
* @since solr 1.4.1
* @author Michael Suzuki
* @author Andrea Gazzarini
*/
@ThreadLeakLingering(linger = 5000)
public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
@@ -92,8 +92,8 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
protected static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
protected String[] deadServers;
protected static SolrResponsesComparator solrComparator = new SolrResponsesComparator();
protected static RandomSupplier solrRandomSupplier;
protected static SolrResponsesComparator SOLR_RESPONSE_COMPARATOR = new SolrResponsesComparator();
protected static RandomSupplier SOLR_RANDOM_SUPPLIER;
// to stress with higher thread counts and requests, make sure the junit
// xml formatter is not being used (all output will be buffered before
@@ -112,34 +112,26 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
* Test configs may use the <code>${hostContext}</code> variable to access
* this system property.
* </p>
*
*/
@BeforeClass
public static void setUpSolrTestProperties()
{
SOLR_RANDOM_SUPPLIER = new RandomSupplier();
System.setProperty("alfresco.test", "true");
System.setProperty("solr.tests.maxIndexingThreads", "10");
System.setProperty("solr.tests.ramBufferSizeMB", "1024");
}
@Before
public void setupPerTest()
{
this.solrRandomSupplier = new RandomSupplier();
}
public static String[] fieldNames = new String[]
{ "n_ti1", "n_f1", "n_tf1", "n_d1", "n_td1", "n_l1", "n_tl1", "n_dt1", "n_tdt1" };
public static final String[] FIELD_NAMES = new String[] { "n_ti1", "n_f1", "n_tf1", "n_d1", "n_td1", "n_l1", "n_tl1", "n_dt1", "n_tdt1" };
protected static String[] getFieldNames()
{
return fieldNames;
return FIELD_NAMES;
}
protected void putHandleDefaults() {
solrComparator.putHandleDefaults();
protected static void putHandleDefaults()
{
SOLR_RESPONSE_COMPARATOR.putHandleDefaults();
}
/**
@@ -147,8 +139,6 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
* @param query - query to execute
* @param count - min number of results each shard must satisfy
* @param waitMillis - total ms to wait
* @return
* @throws Exception
*/
public static boolean checkMinCountPerShard(Query query, int count, long waitMillis) throws SolrServerException,IOException
{
@@ -160,7 +150,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
for (SolrClient singleShard : shardedClients)
{
allShardCompliant = false;
int totalHits = 0;
int totalHits;
int cycles = 1;
while ((new Date()).getTime() < timeout && (!allShardCompliant))
{
@@ -172,11 +162,11 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
}
try
{
Thread.sleep((long) (500 * cycles++));
Thread.sleep(500 * cycles++);
}
catch (InterruptedException e)
{
continue;
// Ignore
}
}
}
@@ -205,11 +195,6 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
/**
* Waits until all cores (including shards) reach a count.
*
* @param query
* @param count
* @param waitMillis
* @throws Exception
*/
public static void waitForDocCountAllCores(Query query, int count, long waitMillis) throws Exception
{
@@ -230,12 +215,9 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
/**
* Delete by query on all clients
*
* @param q
* @throws Exception
*/
public static void deleteByQueryAllClients(String q) throws Exception {
public static void deleteByQueryAllClients(String q) throws Exception
{
List<SolrClient> clients = getStandaloneAndShardedClients();
for (SolrClient client : clients) {
@@ -245,14 +227,11 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
/**
* Gets the Default test client.
*
* @return
*/
protected static SolrClient getDefaultTestClient()
{
return solrCollectionNameToStandaloneClient.get(DEFAULT_TEST_CORENAME);
}
protected static List<SolrClient> getShardedClients()
{
@@ -266,7 +245,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
*/
public static List<SolrClient> getStandaloneAndShardedClients()
{
List<SolrClient> clients = new ArrayList();
List<SolrClient> clients = new ArrayList<>();
clients.addAll(solrCollectionNameToStandaloneClient.values());
clients.addAll(clientShards);
return clients;
@@ -275,17 +254,11 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
public static List<SolrClient> getStandaloneClients()
{
List<SolrClient> clients = new ArrayList();
clients.addAll(solrCollectionNameToStandaloneClient.values());
return clients;
return new ArrayList<>(solrCollectionNameToStandaloneClient.values());
}
/**
* Waits for the doc count on the first core available, then checks that all the shards match.
* @param query
* @param count
* @param waitMillis
* @throws Exception
*/
public static void waitForDocCount(Query query, int count, long waitMillis) throws Exception
{
@@ -332,7 +305,8 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
return escapedField + ":" + value + " ";
}
protected static String escapeQueryChars(String query) {
protected static String escapeQueryChars(String query)
{
return query.replaceAll("\\:", "\\\\:")
.replaceAll("\\{", "\\\\{")
.replaceAll("\\}", "\\\\}");
@@ -341,7 +315,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
public static SolrQuery luceneToSolrQuery(Query query)
{
String[] terms = query.toString().split(" ");
String escapedQuery = new String();
String escapedQuery = "";
for (String t : terms)
{
escapedQuery += escapeQueryClause(t);
@@ -366,8 +340,10 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
waitForShardsCount(solrQuery, count, waitMillis, start);
}
protected static void injectDocToShards(long txnId, long aclId, long dbId, String owner) throws Exception {
for(SolrClient clientShard : clientShards) {
protected static void injectDocToShards(long txnId, long aclId, long dbId, String owner) throws Exception
{
for(SolrClient clientShard : clientShards)
{
SolrInputDocument doc = new SolrInputDocument();
String id = AlfrescoSolrDataModel.getNodeDocumentId(AlfrescoSolrDataModel.DEFAULT_TENANT, aclId, dbId);
doc.addField(FIELD_SOLR4_ID, id);
@@ -383,21 +359,20 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
/**
* Gets the cores for the jetty instances
* @return
*/
protected static List<SolrCore> getJettyCores(Collection<JettySolrRunner> runners)
{
List<SolrCore> cores = new ArrayList();
List<SolrCore> cores = new ArrayList<>();
for (JettySolrRunner jettySolrRunner : runners)
{
jettySolrRunner.getCoreContainer().getCores().forEach(aCore -> cores.add(aCore));
cores.addAll(jettySolrRunner.getCoreContainer().getCores());
}
return cores;
}
protected static List<AlfrescoCoreAdminHandler> getAdminHandlers(Collection<JettySolrRunner> runners)
{
List<AlfrescoCoreAdminHandler> coreAdminHandlers = new ArrayList();
List<AlfrescoCoreAdminHandler> coreAdminHandlers = new ArrayList<>();
for (JettySolrRunner jettySolrRunner : runners)
{
CoreContainer coreContainer = jettySolrRunner.getCoreContainer();
@@ -418,32 +393,27 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
List<SolrClient> clients = getShardedClients();
SolrQuery query = luceneToSolrQuery(new TermQuery(new Term(FIELD_DOC_TYPE, SolrInformationServer.DOC_TYPE_NODE)));
StringBuilder error = new StringBuilder();
for (int i = 0; i < clients.size(); ++i)
for (SolrClient client : clients)
{
SolrClient client = clients.get(i);
QueryResponse response = client.query(query);
int totalHits = (int) response.getResults().getNumFound();
if(totalHits > 0)
if (totalHits > 0)
{
shardHit++;
}
if(totalHits < count)
if (totalHits < count)
{
if (ignoreZero && totalHits == 0)
{
log.info(client+": have zero hits ");
}
else
{
error.append(" "+client+": ");
error.append("Expected nodes per shard greater than "+count+" found "+totalHits+" : "+query.toString());
if (ignoreZero && totalHits == 0) {
log.info(client + ": have zero hits ");
} else {
error.append(" " + client + ": ");
error.append("Expected nodes per shard greater than " + count + " found " + totalHits + " : " + query.toString());
}
}
log.info(client+": Hits "+totalHits);
log.info(client + ": Hits " + totalHits);
}
@@ -560,7 +530,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
protected String getShardsString()
{
Random r = solrRandomSupplier.getRandomGenerator();
Random r = SOLR_RANDOM_SUPPLIER.getRandomGenerator();
if (deadServers == null)
return shards;
@@ -610,7 +580,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
protected static SolrInputDocument addRandFields(SolrInputDocument sdoc)
{
addFields(sdoc, solrRandomSupplier.getRandFields(getFieldNames(), solrRandomSupplier.getRandValues()));
addFields(sdoc, SOLR_RANDOM_SUPPLIER.getRandFields(getFieldNames(), SOLR_RANDOM_SUPPLIER.getRandValues()));
return sdoc;
}
@@ -662,7 +632,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
{
UpdateResponse controlRsp = add(client1, params, sdocs);
UpdateResponse specificRsp = add(client2, params, sdocs);
solrComparator.compareSolrResponses(specificRsp, controlRsp);
SOLR_RESPONSE_COMPARATOR.compareSolrResponses(specificRsp, controlRsp);
return specificRsp;
}
@@ -724,9 +694,6 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
/**
* Commits to the specified client, and optionally all shards
* @param client
* @param andShards
* @throws Exception
*/
protected static void commit(SolrClient client, boolean andShards) throws Exception
{
@@ -743,7 +710,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
protected static QueryResponse queryRandomShard(ModifiableSolrParams params) throws SolrServerException, IOException
{
Random r = solrRandomSupplier.getRandomGenerator();
Random r = SOLR_RANDOM_SUPPLIER.getRandomGenerator();
int which = r.nextInt(clientShards.size());
SolrClient client = clientShards.get(which);
QueryResponse rsp = client.query(params);
@@ -755,13 +722,13 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
params.set("distrib", "false");
QueryRequest request = getAlfrescoRequest(json, params);
QueryResponse controlRsp = request.process(solrClient);
solrComparator.validateResponse(controlRsp);
SOLR_RESPONSE_COMPARATOR.validateResponse(controlRsp);
if (andShards)
{
params.remove("distrib");
setDistributedParams(params);
QueryResponse rsp = queryRandomShard(json, params);
solrComparator.compareResponses(rsp, controlRsp);
SOLR_RESPONSE_COMPARATOR.compareResponses(rsp, controlRsp);
return rsp;
}
else
@@ -772,7 +739,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
protected static QueryResponse queryRandomShard(String json, SolrParams params) throws SolrServerException, IOException
{
Random r = solrRandomSupplier.getRandomGenerator();
Random r = SOLR_RANDOM_SUPPLIER.getRandomGenerator();
int which = r.nextInt(clientShards.size());
SolrClient client = clientShards.get(which);
QueryRequest request = getAlfrescoRequest(json, params);
@@ -791,13 +758,13 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
*/
protected QueryResponse query(SolrClient solrClient, boolean setDistribParams, SolrParams p) throws Exception
{
Random r = solrRandomSupplier.getRandomGenerator();
Random r = SOLR_RANDOM_SUPPLIER.getRandomGenerator();
final ModifiableSolrParams params = new ModifiableSolrParams(p);
// TODO: look into why passing true causes fails
params.set("distrib", "false");
final QueryResponse controlRsp = solrClient.query(params);
solrComparator.validateResponse(controlRsp);
SOLR_RESPONSE_COMPARATOR.validateResponse(controlRsp);
params.remove("distrib");
if (setDistribParams)
@@ -805,7 +772,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
QueryResponse rsp = queryRandomShard(params);
solrComparator.compareResponses(rsp, controlRsp);
SOLR_RESPONSE_COMPARATOR.compareResponses(rsp, controlRsp);
if (stress > 0)
{
@@ -827,7 +794,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
QueryResponse rsp = client.query(new ModifiableSolrParams(params));
if (verifyStress)
{
solrComparator.compareResponses(rsp, controlRsp);
SOLR_RESPONSE_COMPARATOR.compareResponses(rsp, controlRsp);
}
} catch (SolrServerException | IOException e)
{
@@ -865,7 +832,7 @@ public abstract class AbstractAlfrescoDistributedIT extends SolrITInitializer
first = rsp;
} else
{
solrComparator.compareResponses(first, rsp);
SOLR_RESPONSE_COMPARATOR.compareResponses(first, rsp);
}
}

View File

@@ -91,8 +91,8 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
//Standalone Tests
protected static SolrCore defaultCore;
protected static final int clientConnectionTimeout = DEFAULT_CONNECTION_TIMEOUT;;
protected static final int clientSoTimeout = 90000;;
protected static final int clientConnectionTimeout = DEFAULT_CONNECTION_TIMEOUT;
protected static final int clientSoTimeout = 90000;
protected static final String id = "id";
@@ -129,21 +129,22 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
jettyContainers = new HashMap<>();
nodeCnt = new AtomicInteger(0);
String serverName = testClassName;
currentTestName = serverName;
currentTestName = testClassName;
String[] coreNames = new String[]{DEFAULT_TEST_CORENAME};
distribSetUp(serverName);
RandomSupplier.RandVal.uniqueValues = new HashSet(); // reset random values
createServers(serverName, coreNames, numShards,solrcoreProperties);
System.setProperty("solr.solr.home", testDir.toPath().resolve(serverName).toString());
distribSetUp(testClassName);
RandomSupplier.RandVal.uniqueValues = new HashSet<>(); // reset random values
createServers(testClassName, coreNames, numShards,solrcoreProperties);
System.setProperty("solr.solr.home", testDir.toPath().resolve(testClassName).toString());
}
private static Properties addExplicitShardingProperty(Properties solrcoreProperties)
{
if(solrcoreProperties == null){
if(solrcoreProperties == null)
{
solrcoreProperties = new Properties();
}
if(solrcoreProperties.getProperty("shard.method")==null)
{
solrcoreProperties.put("shard.method", "EXPLICIT_ID");
@@ -151,7 +152,8 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
return solrcoreProperties;
}
public static void initSingleSolrServer(String testClassName, Properties solrcoreProperties) throws Throwable {
public static void initSingleSolrServer(String testClassName, Properties solrcoreProperties) throws Throwable
{
initSolrServers(0,testClassName,solrcoreProperties);
JettySolrRunner jsr = jettyContainers.get(testClassName);
@@ -163,7 +165,8 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
{
int i = 0;
extras = new String[solrcoreProperties.size()*2];
for (Map.Entry<Object, Object> prop:solrcoreProperties.entrySet()) {
for (Map.Entry<Object, Object> prop:solrcoreProperties.entrySet())
{
extras[i++] = "property."+prop.getKey();
extras[i++] = (String) prop.getValue();
}
@@ -185,7 +188,7 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
destroyServers();
distribTearDown();
boolean keepTests = Boolean.valueOf(System.getProperty("keep.tests"));
boolean keepTests = Boolean.parseBoolean(System.getProperty("keep.tests"));
if (!keepTests) FileUtils.deleteDirectory(testDir);
}
catch (Exception e)
@@ -203,7 +206,7 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
return System.getProperty("user.dir") + "/target/test-classes/test-files";
}
public static void distribSetUp(String serverName) throws Exception
public static void distribSetUp(String serverName)
{
SolrTestCaseJ4.resetExceptionIgnores(); // ignore anything with
// ignore_exception in it
@@ -213,7 +216,7 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
System.setProperty("solr.log.dir", testDir.toPath().resolve(serverName).toString());
}
public static void distribTearDown() throws Exception
public static void distribTearDown()
{
System.clearProperty("solr.directoryFactory");
System.clearProperty("solr.log.dir");
@@ -230,8 +233,6 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
/**
* Creates a JettySolrRunner (if one didn't exist already). DOES NOT START IT.
* @return
* @throws Exception
*/
protected static JettySolrRunner createJetty(String jettyKey, boolean basicAuth) throws Exception
{
@@ -243,19 +244,13 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
{
Path jettySolrHome = testDir.toPath().resolve(jettyKey);
seedSolrHome(jettySolrHome);
JettySolrRunner jetty = createJetty(jettySolrHome.toFile(), null, null, false, 0, getSchemaFile(), basicAuth);
return jetty;
return createJetty(jettySolrHome.toFile(), null, null, false, 0, getSchemaFile(), basicAuth);
}
}
/**
* Adds the core config information to the jetty file system.
* It's best to call this before calling start() on Jetty.
* @param jettyKey
* @param sourceConfigName
* @param coreName
* @param additionalProperties
* @throws Exception
*/
protected static void addCoreToJetty(String jettyKey, String sourceConfigName, String coreName, Properties additionalProperties) throws Exception
{
@@ -289,17 +284,14 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
out.close();
in.close();
}
}
}
/**
* Starts jetty if it's not already running
* @param jsr
* @throws Exception
*/
protected static void startJetty(JettySolrRunner jsr) throws Exception {
protected static void startJetty(JettySolrRunner jsr) throws Exception
{
if (!jsr.isRunning())
{
jsr.start();
@@ -310,37 +302,40 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
{
boolean basicAuth = additionalProperties != null ? Boolean.parseBoolean(additionalProperties.getProperty("BasicAuth", "false")) : false;
JettySolrRunner jsr = createJetty(jettyKey, basicAuth);
jettyContainers.put(jettyKey, jsr);
Properties properties = new Properties();
if(additionalProperties != null && additionalProperties.size() > 0) {
if(additionalProperties != null && additionalProperties.size() > 0)
{
properties.putAll(additionalProperties);
properties.remove("shard.method");
}
for (int i = 0; i < coreNames.length; i++)
for (String coreName : coreNames)
{
addCoreToJetty(jettyKey, coreNames[i], coreNames[i], properties);
addCoreToJetty(jettyKey, coreName, coreName, properties);
}
//Now start jetty
startJetty(jsr);
int jettyPort = jsr.getLocalPort();
for (int i = 0; i < coreNames.length; i++)
for (String coreName : coreNames)
{
String url = buildUrl(jettyPort) + "/" + coreNames[i];
String url = buildUrl(jettyPort) + "/" + coreName;
log.info(url);
solrCollectionNameToStandaloneClient.put(coreNames[i], createNewSolrClient(url));
solrCollectionNameToStandaloneClient.put(coreName, createNewSolrClient(url));
}
shardsArr = new String[numShards];
StringBuilder sb = new StringBuilder();
if (additionalProperties == null) {
if (additionalProperties == null)
{
additionalProperties = new Properties();
}
String[] ranges = {"0-100", "100-200", "200-300", "300-400"};
@@ -382,6 +377,7 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
solrHomes.add(jetty.getSolrHome());
jetty.stop();
}
for (SolrClient jClients : solrCollectionNameToStandaloneClient.values())
{
jClients.close();
@@ -409,25 +405,16 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
solrCollectionNameToStandaloneClient.clear();
}
public static JettySolrRunner createJetty(File solrHome, String dataDir, String shardList, boolean sslEnabled, int port, String schemaOverride, boolean basicAuth) throws Exception
public static JettySolrRunner createJetty(File solrHome, String dataDir, String shardList, boolean sslEnabled, int port, String schemaOverride, boolean basicAuth)
{
return createJetty(solrHome, dataDir, shardList, sslEnabled, port, schemaOverride, useExplicitNodeNames, basicAuth);
}
/**
* Create a solr jetty server.
*
* @param solrHome
* @param dataDir
* @param shardList
* @param port
* @param schemaOverride
* @param explicitCoreNodeName
* @return
* @throws Exception
*/
public static JettySolrRunner createJetty(File solrHome, String dataDir, String shardList, boolean sslEnabled, int port,
String schemaOverride, boolean explicitCoreNodeName, boolean basicAuth) throws Exception
String schemaOverride, boolean explicitCoreNodeName, boolean basicAuth)
{
Properties props = new Properties();
if (schemaOverride != null)
@@ -443,20 +430,21 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
{
props.setProperty("coreNodeName", Integer.toString(nodeCnt.incrementAndGet()));
}
SSLConfig sslConfig = new SSLConfig(sslEnabled, false, null, null, null, null);
JettyConfig config = null;
JettyConfig config;
if(basicAuth) {
System.out.println("###### adding basic auth ######");
if(basicAuth)
{
log.info("###### adding basic auth ######");
config = JettyConfig.builder().setContext("/solr").setPort(port).withFilter(BasicAuthFilter.class, "/sql/*").stopAtShutdown(true).withSSLConfig(sslConfig).build();
} else {
System.out.println("###### no basic auth ######");
log.info("###### no basic auth ######");
config = JettyConfig.builder().setContext("/solr").setPort(port).stopAtShutdown(true).withSSLConfig(sslConfig).build();
}
JettySolrRunner jetty = new JettySolrRunner(solrHome.getAbsolutePath(), props, config);
return jetty;
return new JettySolrRunner(solrHome.getAbsolutePath(), props, config);
}
/**
@@ -510,11 +498,8 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
*/
protected static void seedSolrHome(Path jettyHome) throws IOException
{
String solrxml = getSolrXml();
if (solrxml != null)
{
FileUtils.copyFile(new File(getTestFilesHome(), solrxml), jettyHome.resolve(getSolrXml()).toFile());
}
FileUtils.copyFile(new File(getTestFilesHome(), getSolrXml()), jettyHome.resolve(getSolrXml()).toFile());
//Add solr home conf folder with alfresco based configuration.
FileUtils.copyDirectory(new File(getTestFilesHome() + "/conf"), jettyHome.resolve("conf").toFile());
// Add alfresco data model def
@@ -547,36 +532,26 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
FileUtils.copyDirectory(coreSourceConfig.resolve("conf").toFile(), confDir.toFile());
}
protected void setupJettySolrHome(String coreName, Path jettyHome) throws IOException
public static class BasicAuthFilter implements Filter
{
seedSolrHome(jettyHome);
Properties coreProperties = new Properties();
coreProperties.setProperty("name", coreName);
coreProperties.setProperty("shard", "${shard:}");
coreProperties.setProperty("collection", "${collection:" + coreName + "}");
coreProperties.setProperty("config", "${solrconfig:solrconfig.xml}");
coreProperties.setProperty("schema", "${schema:schema.xml}");
coreProperties.setProperty("coreNodeName", "${coreNodeName:}");
writeCoreProperties(jettyHome.resolve("cores").resolve(coreName), coreProperties, coreName);
}
public static class BasicAuthFilter implements Filter {
public BasicAuthFilter() {
public BasicAuthFilter()
{
}
public void init(FilterConfig config) {
public void init(FilterConfig config)
{
}
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
throws IOException, ServletException
{
//Parse the basic auth filter
String auth = ((HttpServletRequest)request).getHeader("Authorization");
if(auth != null) {
if(auth != null)
{
auth = auth.replace("Basic ", "");
byte[] bytes = Base64.getDecoder().decode(auth);
String decodedBytes = new String(bytes);
@@ -584,18 +559,24 @@ public abstract class SolrITInitializer extends SolrTestCaseJ4
String user = pair[0];
String password = pair[1];
//Just look for the hard coded user and password.
if (user.equals("test") && password.equals("pass")) {
if (user.equals("test") && password.equals("pass"))
{
filterChain.doFilter(request, response);
} else {
}
else
{
((HttpServletResponse) response).sendError(HttpServletResponse.SC_FORBIDDEN);
}
} else {
}
else
{
((HttpServletResponse) response).sendError(HttpServletResponse.SC_FORBIDDEN);
}
}
public void destroy() {
public void destroy()
{
}
}
}
}

View File

@@ -22,7 +22,7 @@
</distributionManagement>
<properties>
<dependency.alfresco-data-model.version>8.60</dependency.alfresco-data-model.version>
<dependency.alfresco-data-model.version>8.68</dependency.alfresco-data-model.version>
<dependency.jackson.version>2.10.1</dependency.jackson.version>
</properties>
@@ -67,13 +67,13 @@
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>3.2.0</version>
<version>3.2.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.29</version>
<version>1.7.30</version>
</dependency>
</dependencies>

View File

@@ -37,6 +37,10 @@ services:
search:
image: quay.io/alfresco/search-services:${SEARCH_TAG}
environment:
#Replication properties
- REPLICATION_TYPE=master
#- REPLICATION_AFTER=commit,startup
#- REPLICATION_CONFIG_FILES=schema.xml,stopwords.txt
#Solr needs to know how to register itself with Alfresco
- SOLR_ALFRESCO_HOST=alfresco
- SOLR_ALFRESCO_PORT=8080
@@ -51,6 +55,27 @@ services:
- ENABLE_SPELLCHECK=${SEARCH_ENABLE_SPELLCHECK}
ports:
- 8083:8983 #Browser port
#search_slave:
# image: quay.io/alfresco/search-services:${SEARCH_TAG}
# environment:
# #Replication properties
# - REPLICATION_TYPE=slave
# - REPLICATION_MASTER_HOST=search
# - REPLICATION_MASTER_PORT=8983
# #- REPLICATION_MASTER_PROTOCOL=http
# #- REPLICATION_CORE_NAME=alfresco
# #- REPLICATION_POLL_INTERVAL=00:00:60
# #Solr needs to know how to register itself with Alfresco
# - SOLR_ALFRESCO_HOST=alfresco
# - SOLR_ALFRESCO_PORT=8080
# #Alfresco needs to know how to call solr
# - SOLR_SOLR_HOST=search
# - SOLR_SOLR_PORT=8983
# #Create the default alfresco and archive cores
# - SOLR_CREATE_ALFRESCO_DEFAULTS=alfresco,archive
# ports:
# - 8084:8983 #Browser port
activemq:
image: alfresco/alfresco-activemq:5.15.6
ports:

View File

@@ -1,5 +1,48 @@
#!/bin/bash
set -e
# By default it is going to deploy the "Master" setup configuration with "REPLICATION_TYPE=master".
# The slave replica service can be enabled by setting the "REPLICATION_TYPE=slave" environment variable.
SOLR_CONFIG_FILE=$PWD/solrhome/templates/rerank/conf/solrconfig.xml
if [[ $REPLICATION_TYPE == "master" ]]; then
findStringMaster='<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">'
replaceStringMaster="\n\t<lst name=\"master\"> \n"
if [[ $REPLICATION_AFTER == "" ]]; then
REPLICATION_AFTER=commit
fi
for i in $(echo $REPLICATION_AFTER | sed "s/,/ /g")
do
replaceStringMaster+="\t\t<str name=\"replicateAfter\">"$i"<\/str> \n"
done
if [[ ! -z "$REPLICATION_CONFIG_FILES" ]]; then
replaceStringMaster+="\t\t<str name=\"confFiles\">$REPLICATION_CONFIG_FILES<\/str> \n"
fi
replaceStringMaster+="\t<\/lst>"
sed -i "s/$findStringMaster/$findStringMaster$replaceStringMaster/g" $SOLR_CONFIG_FILE
fi
if [[ $REPLICATION_TYPE == "slave" ]]; then
if [[ $REPLICATION_MASTER_PROTOCOL == "" ]]; then
REPLICATION_MASTER_PROTOCOL=http
fi
if [[ $REPLICATION_MASTER_HOST == "" ]]; then
REPLICATION_MASTER_HOST=localhost
fi
if [[ $REPLICATION_MASTER_PORT == "" ]]; then
REPLICATION_MASTER_PORT=8083
fi
if [[ $REPLICATION_CORE_NAME == "" ]]; then
REPLICATION_CORE_NAME=alfresco
fi
if [[ $REPLICATION_POLL_INTERVAL == "" ]]; then
REPLICATION_POLL_INTERVAL=00:00:30
fi
sed -i 's/<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">/<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">\
<lst name="slave">\
<str name="masterUrl">'$REPLICATION_MASTER_PROTOCOL':\/\/'$REPLICATION_MASTER_HOST':'$REPLICATION_MASTER_PORT'\/solr\/'$REPLICATION_CORE_NAME'<\/str>\
<str name="pollInterval">'$REPLICATION_POLL_INTERVAL'<\/str>\
<\/lst>/g' $SOLR_CONFIG_FILE
fi
SOLR_IN_FILE=$PWD/solr.in.sh

View File

@@ -1,58 +1,58 @@
LICENSE FOR THE Extreme! Lab PullParser
------------------------------------------------------------------------
Copyright © 2002 The Trustees of Indiana University.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1) All redistributions of source code must retain the above
copyright notice, the list of authors in the original source
code, this list of conditions and the disclaimer listed in this
license;
2) All redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the disclaimer
listed in this license in the documentation and/or other
materials provided with the distribution;
3) Any documentation included with all redistributions must include
the following acknowledgement:
"This product includes software developed by the Indiana
University Extreme! Lab. For further information please visit
http://www.extreme.indiana.edu/"
Alternatively, this acknowledgment may appear in the software
itself, and wherever such third-party acknowledgments normally
appear.
4) The name "Indiana Univeristy" and "Indiana Univeristy
Extreme! Lab" shall not be used to endorse or promote
products derived from this software without prior written
permission from Indiana University. For written permission,
please contact http://www.extreme.indiana.edu/.
5) Products derived from this software may not use "Indiana
Univeristy" name nor may "Indiana Univeristy" appear in their name,
without prior written permission of the Indiana University.
Indiana University provides no reassurances that the source code
provided does not infringe the patent or any other intellectual
property rights of any other entity. Indiana University disclaims any
liability to any recipient for claims brought by any other entity
based on infringement of intellectual property rights or otherwise.
LICENSEE UNDERSTANDS THAT SOFTWARE IS PROVIDED "AS IS" FOR WHICH
NO WARRANTIES AS TO CAPABILITIES OR ACCURACY ARE MADE. INDIANA
UNIVERSITY GIVES NO WARRANTIES AND MAKES NO REPRESENTATION THAT
SOFTWARE IS FREE OF INFRINGEMENT OF THIRD PARTY PATENT, COPYRIGHT, OR
OTHER PROPRIETARY RIGHTS.  INDIANA UNIVERSITY MAKES NO WARRANTIES THAT
SOFTWARE IS FREE FROM "BUGS", "VIRUSES", "TROJAN HORSES", "TRAP
DOORS", "WORMS", OR OTHER HARMFUL CODE. LICENSEE ASSUMES THE ENTIRE
RISK AS TO THE PERFORMANCE OF SOFTWARE AND/OR ASSOCIATED MATERIALS,
AND TO THE PERFORMANCE AND VALIDITY OF INFORMATION GENERATED USING
SOFTWARE.
LICENSE FOR THE Extreme! Lab
------------------------------------------------------------------------
Copyright © 2003 The Trustees of Indiana University.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1) All redistributions of source code must retain the above
copyright notice, the list of authors in the original source
code, this list of conditions and the disclaimer listed in this
license;
2) All redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the disclaimer
listed in this license in the documentation and/or other
materials provided with the distribution;
3) Any documentation included with all redistributions must include
the following acknowledgement:
"This product includes software developed by the Indiana
University Extreme! Lab. For further information please visit
http://www.extreme.indiana.edu/"
Alternatively, this acknowledgment may appear in the software
itself, and wherever such third-party acknowledgments normally
appear.
4) The name "Indiana University" and "Indiana University
Extreme! Lab" shall not be used to endorse or promote
products derived from this software without prior written
permission from Indiana University. For written permission,
please contact http://www.extreme.indiana.edu/.
5) Products derived from this software may not use "Indiana
University" name nor may "Indiana University" appear in their name,
without prior written permission of the Indiana University.
Indiana University provides no reassurances that the source code
provided does not infringe the patent or any other intellectual
property rights of any other entity. Indiana University disclaims any
liability to any recipient for claims brought by any other entity
based on infringement of intellectual property rights or otherwise.
LICENSEE UNDERSTANDS THAT SOFTWARE IS PROVIDED "AS IS" FOR WHICH
NO WARRANTIES AS TO CAPABILITIES OR ACCURACY ARE MADE. INDIANA
UNIVERSITY GIVES NO WARRANTIES AND MAKES NO REPRESENTATION THAT
SOFTWARE IS FREE OF INFRINGEMENT OF THIRD PARTY PATENT, COPYRIGHT, OR
OTHER PROPRIETARY RIGHTS.  INDIANA UNIVERSITY MAKES NO WARRANTIES THAT
SOFTWARE IS FREE FROM "BUGS", "VIRUSES", "TROJAN HORSES", "TRAP
DOORS", "WORMS", OR OTHER HARMFUL CODE. LICENSEE ASSUMES THE ENTIRE
RISK AS TO THE PERFORMANCE OF SOFTWARE AND/OR ASSOCIATED MATERIALS,
AND TO THE PERFORMANCE AND VALIDITY OF INFORMATION GENERATED USING
SOFTWARE.

View File

@@ -22,8 +22,8 @@ antlr-3.5.2.jar http://www.antlr.org/
jaxen-1.2.0.jar http://www.cafeconleche.org/jaxen/
=== Apache variant License ===
xpp3-1.1.3_8.jar http://www.extreme.indiana.edu/xgws/xsoap/xpp/
=== BSD variant License ===
xpp3-1.1.4c.jar http://www.extreme.indiana.edu/dist/java-repository/xpp3/licenses/LICENSE.txt
=== JSON ===
@@ -53,7 +53,7 @@ cxf-rt-wsdl-3.2.5.jar https://cxf.apache.org/
mybatis-spring-1.2.5.jar http://www.mybatis.org/
chemistry-opencmis-server-support-1.0.0.jar http://chemistry.apache.org/
chemistry-opencmis-server-bindings-1.0.0.jar http://chemistry.apache.org/
quartz-2.3.1.jar http://quartz-scheduler.org/
quartz-2.3.2.jar http://quartz-scheduler.org/
jackson-core-2.10.1.jar https://github.com/FasterXML/jackson
jackson-annotations-2.10.1.jar https://github.com/FasterXML/jackson
jackson-databind-2.10.1.jar https://github.com/FasterXML/jackson

View File

@@ -14,7 +14,7 @@
<packaging>pom</packaging>
<name>Alfresco Solr Search parent</name>
<properties>
<slf4j.version>1.7.29</slf4j.version>
<slf4j.version>1.7.30</slf4j.version>
<cxf.version>3.2.5</cxf.version>
</properties>
<modules>