Merge branch 'feature/SEARCH_1687_ContentStore_Removal' of https://git.alfresco.com/search_discovery/insightengine into feature/SEARCH_1693_Highlighter

agazzarini
2020-01-08 12:49:04 +01:00
8 changed files with 25 additions and 1414 deletions

View File

@@ -67,9 +67,6 @@ public class FakeReadOnlySolrContentStore extends SolrContentStore
.getFields()
.entrySet()
.stream()
.filter(entry -> entry.getValue().useDocValuesAsStored())
.map(Map.Entry::getKey)
.collect(Collectors.toSet());
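
The surviving pipeline above builds the set of schema field names whose values Solr can serve from docValues instead of from the removed content store. A minimal, self-contained sketch of the same selection logic (SchemaFieldLike is a hypothetical stand-in for Solr's SchemaField, modelling only the one predicate the hunk uses):

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public final class DocValuesFieldSelector {
    // Hypothetical stand-in for org.apache.solr.schema.SchemaField.
    interface SchemaFieldLike {
        boolean useDocValuesAsStored();
    }

    // Mirrors the stream pipeline above: keep only the names of fields
    // whose values can be read back from docValues.
    static Set<String> fieldsServedFromDocValues(Map<String, SchemaFieldLike> fields) {
        return fields.entrySet()
                .stream()
                .filter(entry -> entry.getValue().useDocValuesAsStored())
                .map(Map.Entry::getKey)
                .collect(Collectors.toSet());
    }

    public static void main(String[] args) {
        Map<String, SchemaFieldLike> schema = Map.of(
                "id", () -> true,
                "content", () -> false);
        System.out.println(fieldsServedFromDocValues(schema)); // prints [id]
    }
}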

View File

@@ -1,400 +0,0 @@
/*
* Copyright (C) 2005-2013 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.apache.solr.handler.component;
import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_SOLR4_ID;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import org.alfresco.solr.AlfrescoCoreAdminHandler;
import org.alfresco.solr.AlfrescoSolrDataModel;
import org.alfresco.solr.AlfrescoSolrDataModel.TenantAclIdDbId;
import org.alfresco.solr.SolrInformationServer;
import org.alfresco.solr.content.SolrContentStore;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.clustering.ClusteringEngine;
import org.apache.solr.handler.clustering.ClusteringParams;
import org.apache.solr.handler.clustering.DocumentClusteringEngine;
import org.apache.solr.handler.clustering.SearchClusteringEngine;
import org.apache.solr.handler.clustering.carrot2.CarrotClusteringEngine;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.DocListAndSet;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
/**
* @author Andy
*
*/
public class AlfrescoSolrClusteringComponent extends SearchComponent implements
SolrCoreAware {
private static final Logger log = LoggerFactory.getLogger(MethodHandles
.lookup().lookupClass());
/**
* Base name for all component parameters. This name is also used to
* register this component with SearchHandler.
*/
public static final String COMPONENT_NAME = "clustering";
/**
* Declaration-order list of search clustering engines.
*/
private final LinkedHashMap<String, SearchClusteringEngine> searchClusteringEngines = Maps
.newLinkedHashMap();
/**
* Declaration-order list of document clustering engines.
*/
private final LinkedHashMap<String, DocumentClusteringEngine> documentClusteringEngines = Maps
.newLinkedHashMap();
/**
* An unmodifiable view of {@link #searchClusteringEngines}.
*/
private final Map<String, SearchClusteringEngine> searchClusteringEnginesView = Collections
.unmodifiableMap(searchClusteringEngines);
/**
* Initialization parameters are temporarily saved here; the component is
* initialized in {@link #inform(SolrCore)} because we need to know the
* core's {@link SolrResourceLoader}.
*
* @see #init(NamedList)
*/
private NamedList<Object> initParams;
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public void init(NamedList args) {
this.initParams = args;
super.init(args);
}
@SuppressWarnings("unchecked")
@Override
public void inform(SolrCore core) {
if (initParams != null) {
log.info("Initializing Clustering Engines");
// Our target list of engines, split into search-results and
// document clustering.
SolrResourceLoader loader = core.getResourceLoader();
for (Map.Entry<String, Object> entry : initParams) {
if ("engine".equals(entry.getKey())) {
NamedList<Object> engineInitParams = (NamedList<Object>) entry
.getValue();
Boolean optional = engineInitParams
.getBooleanArg("optional");
optional = (optional == null ? Boolean.FALSE : optional);
String engineClassName = StringUtils.defaultIfBlank(
(String) engineInitParams.get("classname"),
CarrotClusteringEngine.class.getName());
// Instantiate the clustering engine and split to
// appropriate map.
final ClusteringEngine engine = loader.newInstance(
engineClassName, ClusteringEngine.class);
final String name = StringUtils.defaultIfBlank(
engine.init(engineInitParams, core), "");
if (!engine.isAvailable()) {
if (optional) {
log.info("Optional clustering engine not available: "
+ name);
} else {
throw new SolrException(ErrorCode.SERVER_ERROR,
"A required clustering engine failed to initialize, check the logs: "
+ name);
}
}
final ClusteringEngine previousEntry;
if (engine instanceof SearchClusteringEngine) {
previousEntry = searchClusteringEngines.put(name,
(SearchClusteringEngine) engine);
} else if (engine instanceof DocumentClusteringEngine) {
previousEntry = documentClusteringEngines.put(name,
(DocumentClusteringEngine) engine);
} else {
log.warn("Unknown type of a clustering engine for class: "
+ engineClassName);
continue;
}
if (previousEntry != null) {
log.warn("Duplicate clustering engine component named '"
+ name + "'.");
}
}
}
// Set up the default engine key for both types of engines.
setupDefaultEngine("search results clustering",
searchClusteringEngines);
setupDefaultEngine("document clustering", documentClusteringEngines);
log.info("Finished Initializing Clustering Engines");
}
}
@Override
public void prepare(ResponseBuilder rb) throws IOException {
SolrParams params = rb.req.getParams();
if (!params.getBool(COMPONENT_NAME, false)) {
return;
}
}
@Override
public void process(ResponseBuilder rb) throws IOException {
SolrParams params = rb.req.getParams();
if (!params.getBool(COMPONENT_NAME, false)) {
return;
}
final String name = getClusteringEngineName(rb);
boolean useResults = params.getBool(
ClusteringParams.USE_SEARCH_RESULTS, false);
if (useResults) {
SearchClusteringEngine engine = searchClusteringEngines.get(name);
if (engine != null) {
checkAvailable(name, engine);
DocListAndSet results = rb.getResults();
Map<SolrDocument, Integer> docIds = Maps
.newHashMapWithExpectedSize(results.docList.size());
SolrDocumentList solrDocList = docListToSolrDocumentList(
results.docList, rb.req, docIds);
Object clusters = engine.cluster(rb.getQuery(), solrDocList,
docIds, rb.req);
rb.rsp.add("clusters", clusters);
} else {
log.warn("No engine named: " + name);
}
}
boolean useCollection = params.getBool(ClusteringParams.USE_COLLECTION,
false);
if (useCollection) {
DocumentClusteringEngine engine = documentClusteringEngines
.get(name);
if (engine != null) {
checkAvailable(name, engine);
boolean useDocSet = params.getBool(
ClusteringParams.USE_DOC_SET, false);
NamedList<?> nl = null;
// TODO: This likely needs to be made into a background task
// that runs in an executor
if (useDocSet) {
nl = engine.cluster(rb.getResults().docSet, params);
} else {
nl = engine.cluster(params);
}
rb.rsp.add("clusters", nl);
} else {
log.warn("No engine named: " + name);
}
}
}
private void checkAvailable(String name, ClusteringEngine engine) {
if (!engine.isAvailable()) {
throw new SolrException(ErrorCode.SERVER_ERROR,
"Clustering engine declared, but not available, check the logs: "
+ name);
}
}
private String getClusteringEngineName(ResponseBuilder rb) {
return rb.req.getParams().get(ClusteringParams.ENGINE_NAME,
ClusteringEngine.DEFAULT_ENGINE_NAME);
}
public SolrDocumentList docListToSolrDocumentList(DocList docs,
SolrQueryRequest req, Map<SolrDocument, Integer> ids)
throws IOException {
SolrDocumentList list = new SolrDocumentList();
list.setNumFound(docs.matches());
list.setMaxScore(docs.maxScore());
list.setStart(docs.offset());
DocIterator dit = docs.iterator();
while (dit.hasNext()) {
int docid = dit.nextDoc();
Document luceneDoc = req.getSearcher().doc(docid);
SolrInputDocument input = getSolrInputDocument(luceneDoc, req);
SolrDocument doc = new SolrDocument();
for (String fieldName : input.getFieldNames()) {
doc.addField(fieldName, input.getFieldValue(fieldName));
}
doc.addField("score", dit.score());
list.add(doc);
if (ids != null) {
ids.put(doc, Integer.valueOf(docid));
}
}
return list;
}
private SolrInputDocument getSolrInputDocument(Document doc,
SolrQueryRequest req) throws IOException {
try {
String id = getFieldValueString(doc, FIELD_SOLR4_ID);
TenantAclIdDbId tenantAndDbId = AlfrescoSolrDataModel
.decodeNodeDocumentId(id);
CoreContainer coreContainer = req.getSearcher().getCore().getCoreContainer();
AlfrescoCoreAdminHandler coreAdminHandler = (AlfrescoCoreAdminHandler) coreContainer.getMultiCoreHandler();
SolrInformationServer srv = (SolrInformationServer) coreAdminHandler.getInformationServers().get(req.getSearcher().getCore().getName());
SolrContentStore solrContentStore = srv.getSolrContentStore();
SolrInputDocument sid = solrContentStore.retrieveDocFromSolrContentStore(
tenantAndDbId.tenant, tenantAndDbId.dbId);
return sid;
} catch (StringIndexOutOfBoundsException e) {
throw new IOException(e);
}
}
private String getFieldValueString(Document doc, String fieldName) {
IndexableField field = doc.getField(fieldName);
String value = null;
if (field != null) {
value = field.stringValue();
}
return value;
}
@Override
public void finishStage(ResponseBuilder rb) {
SolrParams params = rb.req.getParams();
if (!params.getBool(COMPONENT_NAME, false)
|| !params.getBool(ClusteringParams.USE_SEARCH_RESULTS, false)) {
return;
}
if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
String name = getClusteringEngineName(rb);
SearchClusteringEngine engine = searchClusteringEngines.get(name);
if (engine != null) {
checkAvailable(name, engine);
SolrDocumentList solrDocList = (SolrDocumentList) rb.rsp
.getValues().get("response");
// TODO: Currently docIds is set to null in a distributed
// environment, which breaks CarrotParams.PRODUCE_SUMMARY.
// To make CarrotParams.PRODUCE_SUMMARY work in distributed mode,
// we can choose either one of:
// (a) in each shard, ClusteringComponent produces a summary and
// finishStage() merges these summaries, or
// (b) add a doHighlighting(SolrDocumentList, ...) method to
// SolrHighlighter and make SolrHighlighter use "external text"
// rather than stored values to produce snippets.
Map<SolrDocument, Integer> docIds = null;
Object clusters = engine.cluster(rb.getQuery(), solrDocList,
docIds, rb.req);
rb.rsp.add("clusters", clusters);
} else {
log.warn("No engine named: " + name);
}
}
}
/**
* @return an unmodifiable view of the search clustering engines; exposed for tests.
*/
Map<String, SearchClusteringEngine> getSearchClusteringEnginesView() {
return searchClusteringEnginesView;
}
@Override
public String getDescription() {
return "A Clustering component";
}
/**
* Setup the default clustering engine.
*
* @see "https://issues.apache.org/jira/browse/SOLR-5219"
*/
private static <T extends ClusteringEngine> void setupDefaultEngine(
String type, LinkedHashMap<String, T> map) {
// If there's already a default algorithm, leave it as is.
String engineName = ClusteringEngine.DEFAULT_ENGINE_NAME;
T defaultEngine = map.get(engineName);
if (defaultEngine == null || !defaultEngine.isAvailable()) {
// If there is no default algorithm and any engines are available,
// the first available definition becomes the default algorithm.
for (Map.Entry<String, T> e : map.entrySet()) {
if (e.getValue().isAvailable()) {
engineName = e.getKey();
defaultEngine = e.getValue();
map.put(ClusteringEngine.DEFAULT_ENGINE_NAME, defaultEngine);
break;
}
}
}
if (defaultEngine != null) {
log.info("Default engine for " + type + ": " + engineName + " ["
+ defaultEngine.getClass().getSimpleName() + "]");
} else {
log.warn("No default engine for " + type + ".");
}
}
}

View File

@@ -1372,8 +1372,7 @@
http://wiki.apache.org/solr/ClusteringComponent
http://carrot2.github.io/solr-integration-strategies/
-->
<searchComponent name="clustering"
class="org.apache.solr.handler.component.AlfrescoSolrClusteringComponent" >
<searchComponent name="clustering" class="solr.clustering.ClusteringComponent" >
<lst name="engine">
<str name="name">lingo</str>

View File

@@ -1374,8 +1374,7 @@
http://wiki.apache.org/solr/ClusteringComponent
http://carrot2.github.io/solr-integration-strategies/
-->
<searchComponent name="clustering"
class="org.apache.solr.handler.component.AlfrescoSolrClusteringComponent" >
<searchComponent name="clustering" class="solr.clustering.ClusteringComponent" >
<lst name="engine">
<str name="name">lingo</str>

View File

@@ -1,294 +0,0 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.solr.content;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import java.io.File;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Solr ContentStore {@link ChangeSet} test case.
*
* @author Andrea Gazzarini
* @since 1.5
*/
@RunWith(MockitoJUnitRunner.class)
@Ignore
// FIXME: Remove once SEARCH-1687 is completed
public class SolrContentStoreChangeSetTest
{
private ChangeSet changeSet;
private final String contentStoreRootFolder = "/tmp";
private final File rootFolder = new File(contentStoreRootFolder, ChangeSet.CHANGESETS_ROOT_FOLDER_NAME);
@Before
public void setUp()
{
changeSet = new ChangeSet.Builder().withContentStoreRoot(contentStoreRootFolder).build();
}
@After
public void tearDown() throws IOException
{
changeSet.close();
FileUtils.cleanDirectory(rootFolder);
}
@Test(expected = IllegalArgumentException.class)
public void nullContentStoreRootFolder_shouldThrowAnException()
{
new ChangeSet.Builder().withContentStoreRoot(null).build();
}
@Test(expected = IllegalArgumentException.class)
public void nonWriteableContentStoreRootFolder_shouldThrowAnException()
{
new ChangeSet.Builder().withContentStoreRoot("/root").build();
}
@Test
public void newAddOrReplaceRecord_shouldRemovePreviousDeletion()
{
String path ="some/random/dbid.gz";
assertTrue(changeSet.deletes.isEmpty());
assertTrue(changeSet.adds.isEmpty());
changeSet.delete(path);
assertTrue(String.valueOf(changeSet.deletes), changeSet.deletes.contains(path));
assertTrue(String.valueOf(changeSet.adds), changeSet.adds.isEmpty());
changeSet.addOrReplace(path);
assertTrue(String.valueOf(changeSet.deletes), changeSet.deletes.isEmpty());
assertTrue(String.valueOf(changeSet.adds), changeSet.adds.contains(path));
}
@Test
public void deletedRecord_shouldRemovePreviousAdd()
{
String path ="some/random/dbid.gz";
assertTrue(changeSet.deletes.isEmpty());
assertTrue(changeSet.adds.isEmpty());
changeSet.addOrReplace(path);
assertTrue(String.valueOf(changeSet.deletes), changeSet.deletes.isEmpty());
assertTrue(String.valueOf(changeSet.adds), changeSet.adds.contains(path));
changeSet.delete(path);
assertTrue(String.valueOf(changeSet.deletes), changeSet.deletes.contains(path));
assertTrue(String.valueOf(changeSet.adds), changeSet.adds.isEmpty());
}
@Test
public void transientChangeset_doesNothingOnFlush() throws IOException
{
ChangeSet changeset = new ChangeSet.Builder().build();
changeset.addOrReplace("A");
changeset.delete("B");
assertEquals(1, changeset.deletes.size());
assertEquals(1, changeset.adds.size());
changeset.flush();
assertEquals(1, changeset.deletes.size());
assertEquals(1, changeset.adds.size());
}
@Test(expected = UnsupportedOperationException.class)
public void emptyChangeset_isImmutableDoesntAllowAdds()
{
ChangeSet changeset = new ChangeSet.Builder().empty().build();
changeset.addOrReplace("A");
}
@Test(expected = UnsupportedOperationException.class)
public void emptyChangeset_isImmutableDoesntAllowDeletes()
{
ChangeSet changeset = new ChangeSet.Builder().empty().build();
changeset.delete("A");
}
@Test
public void lastCommittedVersionNotPresentAtVeryBeginning()
{
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
}
@Test
public void lastCommittedVersionNotAvailable_shouldReturnNO_AVAILABLE_VERSION() throws IOException
{
changeSet.selectEverything = mock(Query.class);
when(changeSet.selectEverything.rewrite(any(IndexReader.class))).thenThrow(new RuntimeException());
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
}
@Test
public void flushDoesNothingIfThereAreNoChanges() throws IOException
{
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
changeSet.flush();
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
}
@Test
public void invalidOrUnknownVersion() throws IOException
{
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
assertTrue(changeSet.isUnknownVersion(SolrContentStore.NO_VERSION_AVAILABLE));
assertTrue(changeSet.isUnknownVersion(SolrContentStore.NO_VERSION_AVAILABLE - 1L));
assertTrue(changeSet.isUnknownVersion(System.currentTimeMillis()));
changeSet.addOrReplace("A1");
changeSet.addOrReplace("A2");
changeSet.delete("A3");
changeSet.delete("A1");
changeSet.flush();
long lastCommittedVersionAfterFirstFlush = changeSet.getLastCommittedVersion();
assertNotEquals(SolrContentStore.NO_VERSION_AVAILABLE, lastCommittedVersionAfterFirstFlush);
assertTrue(changeSet.isUnknownVersion(System.currentTimeMillis()));
}
@Test
public void validVersion() throws IOException
{
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
assertTrue(changeSet.isUnknownVersion(System.currentTimeMillis()));
changeSet.addOrReplace("A1");
changeSet.addOrReplace("A2");
changeSet.delete("A3");
changeSet.delete("A1");
changeSet.flush();
long lastCommittedVersionAfterFirstFlush = changeSet.getLastCommittedVersion();
changeSet.addOrReplace("B1");
changeSet.addOrReplace("B2");
changeSet.delete("B3");
changeSet.delete("B1");
changeSet.flush();
long lastCommittedVersionAfterSecondFlush = changeSet.getLastCommittedVersion();
assertNotEquals(lastCommittedVersionAfterSecondFlush, lastCommittedVersionAfterFirstFlush);
assertFalse(changeSet.isUnknownVersion(lastCommittedVersionAfterFirstFlush));
assertFalse(changeSet.isUnknownVersion(lastCommittedVersionAfterSecondFlush));
}
@Test
public void inCaseOfFailure_inputVersionIsConsideredUnknown() throws IOException
{
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
assertTrue(changeSet.isUnknownVersion(System.currentTimeMillis()));
changeSet.addOrReplace("A1");
changeSet.addOrReplace("A2");
changeSet.delete("A3");
changeSet.delete("A1");
changeSet.flush();
long lastCommittedVersion = changeSet.getLastCommittedVersion();
// Force an NPE...
changeSet.searcher = null;
// ...so that even a valid version is reported as unknown
assertTrue(changeSet.isUnknownVersion(lastCommittedVersion));
}
@Test
public void persistentChangesetsAreMergedBeforeReturningToRequestor() throws IOException
{
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, changeSet.getLastCommittedVersion());
changeSet.addOrReplace("A1");
changeSet.addOrReplace("A2");
changeSet.delete("A3");
changeSet.delete("A1");
changeSet.flush();
long lastCommittedVersionAfterFirstFlush = changeSet.getLastCommittedVersion();
assertNotEquals(SolrContentStore.NO_VERSION_AVAILABLE, lastCommittedVersionAfterFirstFlush);
changeSet.addOrReplace("A1");
changeSet.addOrReplace("A3");
changeSet.delete("A4");
changeSet.flush();
long lastCommittedVersionAfterSecondFlush = changeSet.getLastCommittedVersion();
assertNotEquals(lastCommittedVersionAfterFirstFlush, lastCommittedVersionAfterSecondFlush);
ChangeSet changesSinceTheVeryBeginning = changeSet.since(SolrContentStore.NO_VERSION_AVAILABLE);
// ADDS = [A1, A2, A3]
// DELS = [A4]
assertEquals(3, changesSinceTheVeryBeginning.adds.size());
assertEquals(1, changesSinceTheVeryBeginning.deletes.size());
assertTrue(changesSinceTheVeryBeginning.adds.contains("A1"));
assertTrue(changesSinceTheVeryBeginning.adds.contains("A2"));
assertTrue(changesSinceTheVeryBeginning.adds.contains("A3"));
assertTrue(changesSinceTheVeryBeginning.deletes.contains("A4"));
ChangeSet changesAfterSecondFlush = changeSet.since(lastCommittedVersionAfterFirstFlush);
// ADDS = [A1, A3]
// DELS = [A4]
assertEquals(2, changesAfterSecondFlush.adds.size());
assertEquals(1, changesAfterSecondFlush.deletes.size());
assertTrue(changesAfterSecondFlush.adds.contains("A1"));
assertTrue(changesAfterSecondFlush.adds.contains("A3"));
assertTrue(changesAfterSecondFlush.deletes.contains("A4"));
}
}
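
The two cancellation tests above pin down the core ChangeSet bookkeeping: an addOrReplace removes a pending delete for the same path, and a delete removes a pending add. A hypothetical reduction of just that invariant:

import java.util.LinkedHashSet;
import java.util.Set;

final class ChangeSetSketch {
    final Set<String> adds = new LinkedHashSet<>();
    final Set<String> deletes = new LinkedHashSet<>();

    // A new add cancels any pending delete of the same path.
    void addOrReplace(String path) {
        deletes.remove(path);
        adds.add(path);
    }

    // A delete cancels any pending add of the same path.
    void delete(String path) {
        adds.remove(path);
        deletes.add(path);
    }

    public static void main(String[] args) {
        ChangeSetSketch changes = new ChangeSetSketch();
        changes.delete("some/random/dbid.gz");
        changes.addOrReplace("some/random/dbid.gz");
        // Same outcome asserted by newAddOrReplaceRecord_shouldRemovePreviousDeletion:
        System.out.println(changes.deletes.isEmpty()); // true
        System.out.println(changes.adds);              // [some/random/dbid.gz]
    }
}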

View File

@@ -1,425 +0,0 @@
/*
* Copyright (C) 2005-2014 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.solr.content;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import org.alfresco.solr.client.NodeMetaData;
import org.apache.commons.io.FileUtils;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
/**
* Tests {@link SolrContentStore}.
*
* @author Derek Hulley
* @author Andrea Gazzarini
* @since 1.5
*/
@RunWith(MockitoJUnitRunner.class)
// FIXME: Remove once SEARCH-1687 is completed
public class SolrContentStoreTest
{
private static final String DEFAULT_TENANT = "_DEFAULT_";
private String solrHome;
private SolrContentStore contentStore;
@Before
public void setUp()
{
solrHome = new File("./target/contentstoretest/" + System.currentTimeMillis()).getAbsolutePath();
contentStore = new SolrContentStore(solrHome);
}
@After
public void tearDown() throws IOException
{
contentStore.close();
File rootDir = new File(new SolrContentStore(solrHome).getRootLocation());
FileUtils.deleteDirectory(rootDir);
}
@Test
public void atVeryBeginningAccessModeIsNotSet()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
}
@Test
public void whenAccessModeIsNotSetMethodCallsThrowsExceptionOrDoNothing()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
expectIllegalState(contentStore::getLastCommittedVersion);
expectIllegalState(contentStore::setLastCommittedVersion, System.currentTimeMillis());
expectIllegalState(contentStore::getChanges, System.currentTimeMillis());
expectIllegalState(contentStore::removeDocFromContentStore, mock(NodeMetaData.class));
expectIllegalState(contentStore::storeDocOnSolrContentStore, mock(NodeMetaData.class), mock(SolrInputDocument.class));
try
{
contentStore.flushChangeSet();
fail();
}
catch (IOException exception)
{
fail();
}
catch(IllegalStateException expected)
{
// Nothing to be done here
}
try
{
contentStore.storeDocOnSolrContentStore(DEFAULT_TENANT, System.currentTimeMillis(), mock(SolrInputDocument.class));
fail();
}
catch(IllegalStateException expected)
{
// Nothing to be done here
}
}
@Test
public void lastCommittedVersionInReadOnlyModeNotFound()
{
contentStore.toggleReadOnlyMode(true);
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, contentStore.getLastCommittedVersion());
}
@Test
public void lastCommittedVersionInReadOnlyModeNotFoundBecauseException() throws IOException
{
contentStore.toggleReadOnlyMode(true);
Files.write(new File(contentStore.getRootLocation(), ".version").toPath(), "NAN".getBytes());
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, contentStore.getLastCommittedVersion());
}
@Test
public void lastCommittedVersionInReadOnlyModeNotFoundBecauseFileIsEmpty() throws IOException
{
contentStore.toggleReadOnlyMode(true);
File emptyVersionFile = new File(contentStore.getRootLocation(), ".version");
emptyVersionFile.createNewFile();
assertEquals(SolrContentStore.NO_VERSION_AVAILABLE, contentStore.getLastCommittedVersion());
}
@Test
public void getLastCommittedVersionInReadOnlyMode() throws IOException
{
contentStore.toggleReadOnlyMode(true);
long expectedLastCommittedVersion = System.currentTimeMillis();
Files.write(new File(contentStore.getRootLocation(), ".version").toPath(), Long.toString(expectedLastCommittedVersion).getBytes());
assertEquals(expectedLastCommittedVersion, contentStore.getLastCommittedVersion());
}
@Test
public void setLastCommittedVersionInReadOnlyMode()
{
contentStore.toggleReadOnlyMode(true);
long expectedLastCommittedVersion = System.currentTimeMillis();
contentStore.setLastCommittedVersion(expectedLastCommittedVersion);
File versionFile = new File(contentStore.getRootLocation(), ".version");
assertTrue(versionFile.canRead());
assertEquals(expectedLastCommittedVersion, contentStore.getLastCommittedVersion());
}
@Test
public void getChangesInReadOnlyModeReturnsAnEmptyMap()
{
contentStore.toggleReadOnlyMode(true);
assertEquals(Collections.<String, List<Map<String, Object>>>emptyMap(), contentStore.getChanges(System.currentTimeMillis()));
}
@Test
public void transitionFromNotSetToReadOnlyMode()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(true);
assertSame(contentStore.readOnly, contentStore.currentAccessMode);
}
@Test
public void transitionFromNotSetToReadWriteMode()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(false);
assertSame(contentStore.readWrite, contentStore.currentAccessMode);
}
@Test
public void transitionFromReadOnlyToReadWriteMode()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(true);
assertSame(contentStore.readOnly, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(false);
assertSame(contentStore.readWrite, contentStore.currentAccessMode);
}
@Test
public void transitionFromReadOnlyToReadOnlyHasNoEffect()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(true);
assertSame(contentStore.readOnly, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(true);
assertSame(contentStore.readOnly, contentStore.currentAccessMode);
}
@Test
public void transitionFromReadWriteToReadOnlyModeHasNoEffect()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(false);
assertSame(contentStore.readWrite, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(true);
assertSame(contentStore.readWrite, contentStore.currentAccessMode);
}
@Test
public void transitionFromReadWriteToReadWriteHasNoEffect()
{
assertSame(contentStore.notYetSet, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(false);
assertSame(contentStore.readWrite, contentStore.currentAccessMode);
contentStore.toggleReadOnlyMode(false);
assertSame(contentStore.readWrite, contentStore.currentAccessMode);
}
@Test(expected = RuntimeException.class)
public void contentStoreCreation_solrHomeNull_shouldThrowException()
{
new SolrContentStore(null);
}
@Test(expected = RuntimeException.class)
public void contentStoreCreation_solrHomeEmpty_shouldThrowException()
{
new SolrContentStore("");
}
@Test
public void contentStoreCreation_solrHomeNotExistSolrContentDirNotDefined_shouldUseDefaultContentStore()
{
SolrContentStore solrContentStore = new SolrContentStore(solrHome + "/notExist");
Assert.assertThat(solrContentStore.getRootLocation(), is(solrHome + "/" + SolrContentStore.CONTENT_STORE));
}
@Test
public void contentStoreCreation_solrHomeNotExistSolrContentDirDefined_shouldCreateContentStore()
{
String testContentDir = solrHome + "/test/content/dir";
System.setProperty(SolrContentStore.SOLR_CONTENT_DIR, testContentDir);
SolrContentStore solrContentStore = new SolrContentStore(solrHome + "/notExist");
Assert.assertThat(solrContentStore.getRootLocation(), is(testContentDir));
System.clearProperty(SolrContentStore.SOLR_CONTENT_DIR);
}
@Test
public void contentStoreCreation_solrHomeExistSolrContentDirDefined_shouldCreateContentStore()
{
String testContentDir = solrHome + "/test/content/dir";
System.setProperty(SolrContentStore.SOLR_CONTENT_DIR, testContentDir);
SolrContentStore solrContentStore = new SolrContentStore(solrHome);
Assert.assertThat(solrContentStore.getRootLocation(), is(testContentDir));
System.clearProperty(SolrContentStore.SOLR_CONTENT_DIR);
}
@Test
public void contentStoreCreation_solrHomeExistSolrContentDirNotDefined_shouldUseDefaultContentStore()
{
String existSolrHomePath = solrHome + "/exist";
File existSolrHome = new File(existSolrHomePath);
existSolrHome.mkdir();
SolrContentStore solrContentStore = new SolrContentStore(existSolrHomePath);
Assert.assertThat(solrContentStore.getRootLocation(), is(solrHome + "/" + SolrContentStore.CONTENT_STORE));
}
@Test
public void rootLocation()
{
File rootDir = new File(contentStore.getRootLocation());
assertTrue(rootDir.exists());
assertTrue(rootDir.isDirectory());
}
@Test
public void storeDocOnSolrContentStore()
{
contentStore.toggleReadOnlyMode(false);
SolrInputDocument doc = mock(SolrInputDocument.class);
long dbid = 111;
String tenant = "me";
SolrInputDocument document = contentStore.retrieveDocFromSolrContentStore(tenant, dbid);
Assert.assertNull(document);
contentStore.storeDocOnSolrContentStore(tenant, dbid, doc);
document = contentStore.retrieveDocFromSolrContentStore(tenant, dbid);
Assert.assertNotNull(document);
}
@Test
public void storeDocOnSolrContentStoreNodeMetaData()
{
contentStore.toggleReadOnlyMode(false);
SolrInputDocument doc = mock(SolrInputDocument.class);
NodeMetaData nodeMetaData = mock(NodeMetaData.class);
SolrInputDocument document = contentStore.retrieveDocFromSolrContentStore(DEFAULT_TENANT, 0);
Assert.assertNull(document);
contentStore.storeDocOnSolrContentStore(nodeMetaData, doc);
document = contentStore.retrieveDocFromSolrContentStore(DEFAULT_TENANT, 0);
Assert.assertNotNull(document);
}
@Test
public void removeDocFromContentStore()
{
contentStore.toggleReadOnlyMode(false);
SolrInputDocument doc = mock(SolrInputDocument.class);
NodeMetaData nodeMetaData = mock(NodeMetaData.class);
contentStore.storeDocOnSolrContentStore(nodeMetaData, doc);
SolrInputDocument document = contentStore.retrieveDocFromSolrContentStore(DEFAULT_TENANT, 0);
Assert.assertNotNull(document);
contentStore.removeDocFromContentStore(nodeMetaData);
document = contentStore.retrieveDocFromSolrContentStore(DEFAULT_TENANT, 0);
Assert.assertNull(document);
}
private void expectIllegalState(Supplier<?> function)
{
try
{
function.get();
fail();
}
catch (IllegalStateException expected)
{
// Nothing to do, this is expected
}
}
private <T> void expectIllegalState(Consumer<T> function, T arg)
{
try
{
function.accept(arg);
fail();
}
catch (IllegalStateException expected)
{
// Nothing to do, this is expected
}
}
private <I,O> void expectIllegalState(Function<I,O> function, I arg)
{
try
{
function.apply(arg);
fail();
}
catch (IllegalStateException expected)
{
// Nothing to do, this is expected
}
}
private <A, B> void expectIllegalState(BiConsumer<A, B> function, A arg1, B arg2)
{
try
{
function.accept(arg1, arg2);
fail();
}
catch (IllegalStateException expected)
{
// Nothing to do, this is expected
}
}
}
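
The transition tests above fully determine the content store's access-mode state machine: the store starts in a not-yet-set state, the first toggleReadOnlyMode call selects read-only or read-write, read-only can still be promoted to read-write, and read-write is terminal. A hypothetical sketch of exactly those rules:

final class AccessModeSketch {
    enum AccessMode { NOT_YET_SET, READ_ONLY, READ_WRITE }

    private AccessMode current = AccessMode.NOT_YET_SET;

    void toggleReadOnlyMode(boolean readOnly) {
        // transitionFromReadWriteToReadOnlyModeHasNoEffect: READ_WRITE is terminal.
        if (current == AccessMode.READ_WRITE) {
            return;
        }
        current = readOnly ? AccessMode.READ_ONLY : AccessMode.READ_WRITE;
    }

    AccessMode current() {
        return current;
    }

    public static void main(String[] args) {
        AccessModeSketch store = new AccessModeSketch();
        store.toggleReadOnlyMode(true);   // NOT_YET_SET -> READ_ONLY
        store.toggleReadOnlyMode(false);  // READ_ONLY -> READ_WRITE
        store.toggleReadOnlyMode(true);   // no effect, stays READ_WRITE
        System.out.println(store.current()); // READ_WRITE
    }
}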

View File

@@ -1,286 +0,0 @@
/*
* Copyright (C) 2005-2019 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.solr.handler;
import org.alfresco.solr.AbstractAlfrescoDistributedIT;
import org.alfresco.solr.client.Acl;
import org.alfresco.solr.client.AclChangeSet;
import org.alfresco.solr.client.AclReaders;
import org.alfresco.solr.client.Node;
import org.alfresco.solr.client.NodeMetaData;
import org.alfresco.solr.client.SOLRAPIQueueClient;
import org.alfresco.solr.client.Transaction;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.util.Time;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import static java.util.Collections.singletonList;
import static org.alfresco.solr.AlfrescoSolrUtils.getAcl;
import static org.alfresco.solr.AlfrescoSolrUtils.getAclChangeSet;
import static org.alfresco.solr.AlfrescoSolrUtils.getAclReaders;
import static org.alfresco.solr.AlfrescoSolrUtils.getNode;
import static org.alfresco.solr.AlfrescoSolrUtils.getNodeMetaData;
import static org.alfresco.solr.AlfrescoSolrUtils.getTransaction;
import static org.alfresco.solr.AlfrescoSolrUtils.indexAclChangeSet;
import static org.alfresco.solr.utils.AlfrescoFileUtils.areDirectoryEquals;
import static org.carrot2.shaded.guava.common.collect.ImmutableList.of;
/**
* @author Elia Porciani
*
* This test checks that the synchronization of the content store between master and slave works correctly.
*/
@SolrTestCaseJ4.SuppressSSL
public class ContentStoreReplicationIT extends AbstractAlfrescoDistributedIT
{
protected static JettySolrRunner master;
protected static JettySolrRunner slave;
protected static SolrClient masterClient;
protected static SolrClient slaveClient;
protected static Path masterSolrHome;
protected static Path slaveSolrHome;
protected static Path masterContentStore;
protected static Path slaveContentStore;
private static Acl acl;
private static final int MILLIS_TIMOUT = 80000;
@BeforeClass
public static void createMasterSlaveEnv() throws Exception
{
Properties properties = new Properties();
clientShards = new ArrayList<>();
solrShards = new ArrayList<>();
solrCollectionNameToStandaloneClient = new HashMap<>();
jettyContainers = new HashMap<>();
String coreName = "master";
boolean basicAuth = Boolean.parseBoolean(properties.getProperty("BasicAuth", "false"));
String masterDir = "master" + Time.now();
String slaveDir = "slave" + Time.now();
String masterKey = masterDir + "/solrHome";
String slaveKey = slaveDir + "/solrHome";
master = createJetty(masterKey, basicAuth);
addCoreToJetty(masterKey, coreName, coreName, null);
startJetty(master);
String slaveCoreName = "slave";
slave = createJetty(slaveKey, basicAuth);
addCoreToJetty(slaveKey, slaveCoreName, slaveCoreName, null);
setMasterUrl(slaveKey, slaveCoreName, master.getBaseUrl().toString() + "/master");
startJetty(slave);
String masterStr = buildUrl(master.getLocalPort()) + "/" + coreName;
String slaveStr = buildUrl(slave.getLocalPort()) + "/" + slaveCoreName;
masterClient = createNewSolrClient(masterStr);
slaveClient = createNewSolrClient(slaveStr);
masterSolrHome = testDir.toPath().resolve(masterKey);
slaveSolrHome = testDir.toPath().resolve(slaveKey);
masterContentStore = testDir.toPath().resolve(masterDir + "/contentstore");
slaveContentStore = testDir.toPath().resolve(slaveDir + "/contentstore");
AclChangeSet aclChangeSet = getAclChangeSet(1);
acl = getAcl(aclChangeSet);
AclReaders aclReaders = getAclReaders(aclChangeSet, acl, singletonList("joel"), singletonList("phil"), null);
indexAclChangeSet(aclChangeSet,
of(acl),
of(aclReaders));
}
@AfterClass
public static void cleanupMasterSlave() throws Exception
{
master.stop();
slave.stop();
FileUtils.deleteQuietly(new File(masterSolrHome.getParent().toUri()));
FileUtils.deleteQuietly(new File(slaveSolrHome.getParent().toUri()));
SOLRAPIQueueClient.nodeMetaDataMap.clear();
SOLRAPIQueueClient.transactionQueue.clear();
SOLRAPIQueueClient.aclChangeSetQueue.clear();
SOLRAPIQueueClient.aclReadersMap.clear();
SOLRAPIQueueClient.aclMap.clear();
SOLRAPIQueueClient.nodeMap.clear();
SOLRAPIQueueClient.nodeContentMap.clear();
}
@Test
public void contentStoreReplicationTest() throws Exception
{
// ADD 250 nodes and check they are replicated
int numNodes = 250;
Transaction bigTxn = getTransaction(0, numNodes);
List<Node> nodes = new ArrayList<>();
List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
for (int i = 0; i < numNodes; i++) {
Node node = getNode(i, bigTxn, acl, Node.SolrApiNodeStatus.UPDATED);
nodes.add(node);
NodeMetaData nodeMetaData = getNodeMetaData(node, bigTxn, acl, "mike", null, false);
node.setNodeRef(nodeMetaData.getNodeRef().toString());
nodeMetaDatas.add(nodeMetaData);
}
indexTransaction(bigTxn, nodes, nodeMetaDatas);
waitForDocCountCore(masterClient,
luceneToSolrQuery(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world"))),
numNodes, MILLIS_TIMOUT, System.currentTimeMillis());
long filesInMasterContentStore = Files.walk(Paths.get(masterContentStore.toUri().resolve("_DEFAULT_")))
.filter(Files::isRegularFile)
.count();
Assert.assertEquals( "master contentStore should have " + numNodes + "files", numNodes, filesInMasterContentStore);
assertTrue("slave content store is not in sync after timeout", waitForContentStoreSync(MILLIS_TIMOUT));
// ADD another 10 nodes
int numUpdates = 10;
int totalNodes = numNodes + numUpdates;
Transaction updateTx = getTransaction(0, numUpdates);
List<Node> updateNodes = new ArrayList<>();
List<NodeMetaData> updateNodeMetaDatas = new ArrayList<>();
for (int i = numNodes; i < totalNodes; i++) {
Node node = getNode(i, updateTx, acl, Node.SolrApiNodeStatus.UPDATED);
updateNodes.add(node);
NodeMetaData nodeMetaData = getNodeMetaData(node, updateTx, acl, "mike", null, false);
node.setNodeRef(nodeMetaData.getNodeRef().toString());
updateNodeMetaDatas.add(nodeMetaData);
}
indexTransaction(updateTx, updateNodes, updateNodeMetaDatas);
waitForDocCountCore(masterClient,
luceneToSolrQuery(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world"))),
numNodes + numUpdates, MILLIS_TIMOUT, System.currentTimeMillis());
filesInMasterContentStore = Files.walk(Paths.get(masterContentStore.toUri().resolve("_DEFAULT_")))
.filter(Files::isRegularFile)
.count();
Assert.assertEquals( "master contentStore should have " + totalNodes + "files", totalNodes, filesInMasterContentStore);
assertTrue("slave content store is not in sync after timeout", waitForContentStoreSync(MILLIS_TIMOUT));
// DELETE 30 nodes
int numDeletes = 30;
Transaction deleteTx = getTransaction(numDeletes, 0);
totalNodes = numNodes + numUpdates - numDeletes;
List<Node> deleteNodes = new ArrayList<>();
List<NodeMetaData> deleteNodeMetaDatas = new ArrayList<>();
for (int i = 0; i < numDeletes; i++) {
Node node = getNode(i, deleteTx, acl, Node.SolrApiNodeStatus.DELETED);
deleteNodes.add(node);
NodeMetaData nodeMetaData = getNodeMetaData(node, deleteTx, acl, "mike", null, false);
node.setNodeRef(nodeMetaData.getNodeRef().toString());
deleteNodeMetaDatas.add(nodeMetaData);
}
indexTransaction(deleteTx, deleteNodes, deleteNodeMetaDatas);
waitForDocCountCore(masterClient,
luceneToSolrQuery(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world"))),
totalNodes, MILLIS_TIMOUT, System.currentTimeMillis());
filesInMasterContentStore = Files.walk(Paths.get(masterContentStore.toUri().resolve("_DEFAULT_")))
.filter(Files::isRegularFile)
.count();
Assert.assertEquals( "master contentStore should have " + totalNodes + "files", totalNodes, filesInMasterContentStore);
assertTrue("slave content store is not in sync after timeout", waitForContentStoreSync(MILLIS_TIMOUT));
}
private static boolean waitForContentStoreSync(long waitMillis) throws InterruptedException
{
long startMillis = System.currentTimeMillis();
long timeout = startMillis + waitMillis;
while (new Date().getTime() < timeout)
{
try
{
if (areDirectoryEquals(masterContentStore, slaveContentStore, new String[]{"gz"}, true))
{
return true;
}
}
catch (Exception e)
{
// comparison may fail while files are still being copied; ignore and retry
}
Thread.sleep(500);
}
return false;
}
private static void setMasterUrl(String jettyKey, String coreName, String masterUrl) throws IOException
{
Path jettySolrHome = testDir.toPath().resolve(jettyKey);
Path coreHome = jettySolrHome.resolve(coreName);
Path confDir = coreHome.resolve("conf");
Path solrConfigPath = confDir.resolve("solrconfig.xml");
String content = new String(Files.readAllBytes(solrConfigPath));
content = content.replaceAll("\\{masterURL\\}", masterUrl);
Files.write(solrConfigPath, content.getBytes());
}
}
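
waitForContentStoreSync above is an instance of a generic poll-until-condition-or-timeout pattern. A self-contained sketch of that pattern (fixed 500 ms interval as in the test; transient comparison failures are treated as "not yet in sync"):

import java.util.function.BooleanSupplier;

final class PollUntil {
    static boolean await(BooleanSupplier condition, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            try {
                if (condition.getAsBoolean()) {
                    return true;
                }
            } catch (RuntimeException ignored) {
                // A transient failure just means "retry on the next poll".
            }
            Thread.sleep(500);
        }
        return false;
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        // Toy condition that becomes true after about one second.
        System.out.println(await(() -> System.currentTimeMillis() - start > 1000, 5000)); // true
    }
}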

View File

@@ -4,44 +4,65 @@ set -e
# Slave replica service can be enabled using "REPLICATION_TYPE=slave" environment value.
SOLR_CONFIG_FILE=$PWD/solrhome/templates/rerank/conf/solrconfig.xml
SOLR_CORE_FILE=$PWD/solrhome/templates/rerank/conf/solrcore.properties
if [[ $REPLICATION_TYPE == "master" ]]; then
-findStringMaster='<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">/<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">'
+findStringMaster='<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">'
replaceStringMaster="\n\t<lst name=\"master\"> \n"
if [[ $REPLICATION_AFTER == "" ]]; then
-REPLICATION_AFTER=commit
+REPLICATION_AFTER=commit,startup
fi
if [[ $REPLICATION_CONFIG_FILES == "" ]]; then
REPLICATION_CONFIG_FILES=schema.xml,stopwords.txt
fi
for i in $(echo $REPLICATION_AFTER | sed "s/,/ /g")
do
replaceStringMaster+="\t\t<str name=\"replicateAfter\">"$i"<\/str> \n"
done
if [[ ! -z "$REPLICATION_CONFIG_FILES" ]]; then
replaceStringMaster+="\t\t<str name=\"confFiles\">$REPLICATION_CONFIG_FILES<\/str> \n"
fi
replaceStringMaster+="\t<\/lst>"
sed -i "s/$findStringMaster/$findStringMaster$replaceStringMaster/g" $SOLR_CONFIG_FILE
sed -i "s/enable.alfresco.tracking=true/enable.alfresco.tracking=true\nenable.master=true\nenable.slave=false/g" $SOLR_CORE_FILE
fi
if [[ $REPLICATION_TYPE == "slave" ]]; then
if [[ $REPLICATION_MASTER_PROTOCOL == "" ]]; then
REPLICATION_MASTER_PROTOCOL=http
fi
if [[ $REPLICATION_MASTER_HOST == "" ]]; then
REPLICATION_MASTER_HOST=localhost
fi
if [[ $REPLICATION_MASTER_PORT == "" ]]; then
REPLICATION_MASTER_PORT=8083
fi
if [[ $REPLICATION_CORE_NAME == "" ]]; then
REPLICATION_CORE_NAME=alfresco
fi
if [[ $REPLICATION_POLL_INTERVAL == "" ]]; then
REPLICATION_POLL_INTERVAL=00:00:30
fi
sed -i 's/<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">/<requestHandler name="\/replication" class="org\.alfresco\.solr\.handler\.AlfrescoReplicationHandler">\
<lst name="slave">\
<str name="masterUrl">'$REPLICATION_MASTER_PROTOCOL':\/\/'$REPLICATION_MASTER_HOST':'$REPLICATION_MASTER_PORT'\/solr\/'$REPLICATION_CORE_NAME'<\/str>\
<str name="pollInterval">'$REPLICATION_POLL_INTERVAL'<\/str>\
<\/lst>/g' $SOLR_CONFIG_FILE
sed -i "s/enable.alfresco.tracking=true/enable.alfresco.tracking=false\nenable.master=false\nenable.slave=true/g" $SOLR_CORE_FILE
fi
SOLR_IN_FILE=$PWD/solr.in.sh
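
For reference, with the defaults above (REPLICATION_AFTER=commit,startup and REPLICATION_CONFIG_FILES=schema.xml,stopwords.txt) the master-side sed substitution expands the replication handler in solrconfig.xml to roughly the following (the remainder of the handler definition follows as before):

<requestHandler name="/replication" class="org.alfresco.solr.handler.AlfrescoReplicationHandler">
	<lst name="master">
		<str name="replicateAfter">commit</str>
		<str name="replicateAfter">startup</str>
		<str name="confFiles">schema.xml,stopwords.txt</str>
	</lst>

The slave branch instead injects a <lst name="slave"> block whose masterUrl points at ${REPLICATION_MASTER_PROTOCOL}://${REPLICATION_MASTER_HOST}:${REPLICATION_MASTER_PORT}/solr/${REPLICATION_CORE_NAME} with the configured pollInterval.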