Merge branch 'master' of https://git.alfresco.com/search_discovery/insightengine into feature/SEARCH-1752

This commit is contained in:
agazzarini
2019-10-19 07:05:24 +02:00
13 changed files with 664 additions and 285 deletions

View File

@@ -13,7 +13,7 @@
<properties>
<tas.rest.api.version>6.0.1.2</tas.rest.api.version>
<tas.cmis.api.version>6.0.0.4</tas.cmis.api.version>
<tas.utility.version>3.0.11</tas.utility.version>
<tas.utility.version>3.0.12</tas.utility.version>
<!-- GS V3.2-0-SNAPSHOT will have to be used for GS-IE automation tests due to the dependencies in TAS Rest API project
This version needs to be later updated when GS V3.2-0 is released -->
<rm.version>3.2.0-SNAPSHOT</rm.version>

View File

@@ -1,5 +1,5 @@
/*
* Copyright (C) 2005-2016 Alfresco Software Limited.
* Copyright (C) 2005-2019 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -16,31 +16,291 @@
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.solr.lifecycle;
import static java.util.Optional.ofNullable;
import org.alfresco.opencmis.dictionary.CMISStrictDictionaryService;
import org.alfresco.solr.AlfrescoCoreAdminHandler;
import org.alfresco.solr.AlfrescoSolrDataModel;
import org.alfresco.solr.SolrInformationServer;
import org.alfresco.solr.SolrKeyResourceLoader;
import org.alfresco.solr.client.SOLRAPIClient;
import org.alfresco.solr.client.SOLRAPIClientFactory;
import org.alfresco.solr.content.SolrContentStore;
import org.alfresco.solr.tracker.AclTracker;
import org.alfresco.solr.tracker.CascadeTracker;
import org.alfresco.solr.tracker.CommitTracker;
import org.alfresco.solr.tracker.ContentTracker;
import org.alfresco.solr.tracker.MetadataTracker;
import org.alfresco.solr.tracker.ModelTracker;
import org.alfresco.solr.tracker.SolrTrackerScheduler;
import org.alfresco.solr.tracker.Tracker;
import org.alfresco.solr.tracker.TrackerRegistry;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.AbstractSolrEventListener;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptorDecorator;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.ReplicationHandler;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.search.SolrIndexSearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.function.Function;
import java.util.function.Predicate;
/**
* Listens for the first searcher to be created for a core and registers the trackers
* Listeners for *FIRST SEARCHER* events in order to prepare and register the SolrContentStore and the Tracking Subsystem.
*
* @author Gethin James
* @author Andrea Gazzarini
*/
public class SolrCoreLoadListener extends AbstractSolrEventListener {
public class SolrCoreLoadListener extends AbstractSolrEventListener
{
private static final Logger LOGGER = LoggerFactory.getLogger(SolrCoreLoadListener.class);
public SolrCoreLoadListener(SolrCore core) {
/**
* Builds a new listener instance with the given {@link SolrCore} (event source).
*
* @param core the {@link SolrCore} instance representing the event source of this listener.
*/
public SolrCoreLoadListener(SolrCore core)
{
super(core);
}
@Override
public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {
public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher)
{
CoreContainer coreContainer = getCore().getCoreContainer();
AlfrescoCoreAdminHandler coreAdminHandler = (AlfrescoCoreAdminHandler) coreContainer.getMultiCoreHandler();
AlfrescoCoreAdminHandler admin = (AlfrescoCoreAdminHandler) coreContainer.getMultiCoreHandler();
SolrCore core = getCore();
TrackerRegistry trackerRegistry = admin.getTrackerRegistry();
Properties coreProperties = new CoreDescriptorDecorator(core.getCoreDescriptor()).getProperties();
SolrResourceLoader loader = core.getLatestSchema().getResourceLoader();
SolrKeyResourceLoader keyResourceLoader = new SolrKeyResourceLoader(loader);
SOLRAPIClientFactory clientFactory = new SOLRAPIClientFactory();
SOLRAPIClient repositoryClient =
clientFactory.getSOLRAPIClient(coreProperties, keyResourceLoader,
AlfrescoSolrDataModel.getInstance().getDictionaryService(CMISStrictDictionaryService.DEFAULT),
AlfrescoSolrDataModel.getInstance().getNamespaceDAO());
SolrContentStore contentStore = new SolrContentStore(coreContainer.getSolrHome());
SolrInformationServer informationServer = new SolrInformationServer(admin, core, repositoryClient, contentStore);
coreProperties.putAll(informationServer.getProps());
admin.getInformationServers().put(core.getName(), informationServer);
final SolrTrackerScheduler scheduler = admin.getScheduler();
// Prevents other threads from registering the ModelTracker at the same time
// Create model tracker and load all the persisted models
synchronized(SolrCoreLoadListener.class)
{
createModelTracker(core.getName(),
trackerRegistry,
coreProperties,
coreContainer.getSolrHome(),
repositoryClient,
informationServer,
scheduler);
}
boolean trackersHaveBeenEnabled = Boolean.parseBoolean(coreProperties.getProperty("enable.alfresco.tracking", "true"));
boolean owningCoreIsSlave = isSlaveModeEnabledFor(core);
// Guard conditions: if trackers must be disabled then immediately return, we've done here.
// Case #1: trackers have been explicitly disabled.
if (!trackersHaveBeenEnabled)
{
LOGGER.info("SearchServices Core Trackers have been explicitly disabled on core \"{}\" through \"enable.alfresco.tracking\" configuration property.", core.getName());
return;
}
// Case #2: we are on a slave node.
if (owningCoreIsSlave)
{
LOGGER.info("SearchServices Core Trackers have been disabled on core \"{}\" because it is a slave core.", core.getName());
return;
}
LOGGER.info("SearchServices Tracking Subsystem starts on core {}", core.getName());
if (trackerRegistry.hasTrackersForCore(core.getName()))
{
LOGGER.info("Trackers for " + core.getName()+ " is already registered, shutting them down.");
shutdownTrackers(core.getName(), trackerRegistry.getTrackersForCore(core.getName()), scheduler);
trackerRegistry.removeTrackersForCore(core.getName());
admin.getInformationServers().remove(core.getName());
}
final List<Tracker> trackers = createCoreTrackers(core.getName(), trackerRegistry, coreProperties, scheduler, repositoryClient, informationServer);
CommitTracker commitTracker = new CommitTracker(coreProperties, repositoryClient, core.getName(), informationServer, trackers);
trackerRegistry.register(core.getName(), commitTracker);
scheduler.schedule(commitTracker, core.getName(), coreProperties);
LOGGER.info("SearchServices Core Trackers have been correctly registered and scheduled.");
//Add the commitTracker to the list of scheduled trackers that can be shutdown
trackers.add(commitTracker);
core.addCloseHook(new CloseHook()
{
@Override
public void preClose(SolrCore core)
{
LOGGER.info("Tracking Subsystem shutdown procedure for core {} has been started.", core.getName());
shutdownTrackers(core.getName(), trackers, scheduler);
}
@Override
public void postClose(SolrCore core)
{
LOGGER.info("Shutdown procedure for core {} has been completed.", core.getName());
}
});
SolrCoreLoadRegistration.registerForCore(coreAdminHandler, coreContainer, getCore(), getCore().getName());
}
}
List<Tracker> createCoreTrackers(String coreName,
TrackerRegistry trackerRegistry,
Properties props,
SolrTrackerScheduler scheduler,
SOLRAPIClient repositoryClient,
SolrInformationServer srv)
{
List<Tracker> trackers = new ArrayList<>();
AclTracker aclTracker = new AclTracker(props, repositoryClient, coreName, srv);
trackerRegistry.register(coreName, aclTracker);
scheduler.schedule(aclTracker, coreName, props);
ContentTracker contentTrkr = new ContentTracker(props, repositoryClient, coreName, srv);
trackerRegistry.register(coreName, contentTrkr);
scheduler.schedule(contentTrkr, coreName, props);
MetadataTracker metaTrkr = new MetadataTracker(props, repositoryClient, coreName, srv);
trackerRegistry.register(coreName, metaTrkr);
scheduler.schedule(metaTrkr, coreName, props);
CascadeTracker cascadeTrkr = new CascadeTracker(props, repositoryClient, coreName, srv);
trackerRegistry.register(coreName, cascadeTrkr);
scheduler.schedule(cascadeTrkr, coreName, props);
//The CommitTracker will acquire these locks in order
//The ContentTracker will likely have the longest runs so put it first to ensure the MetadataTracker is not paused while
//waiting for the ContentTracker to release it's lock.
//The aclTracker will likely have the shortest runs so put it last.
trackers.add(cascadeTrkr);
trackers.add(contentTrkr);
trackers.add(metaTrkr);
trackers.add(aclTracker);
return trackers;
}
private void createModelTracker(String coreName,
TrackerRegistry trackerRegistry,
Properties props,
String solrHome,
SOLRAPIClient repositoryClient,
SolrInformationServer srv,
SolrTrackerScheduler scheduler)
{
ModelTracker mTracker = trackerRegistry.getModelTracker();
if (mTracker == null)
{
LOGGER.debug("Creating a new Model Tracker instance.");
mTracker = new ModelTracker(solrHome, props, repositoryClient, coreName, srv);
trackerRegistry.setModelTracker(mTracker);
LOGGER.info("Model Tracker: ensuring first model sync.");
mTracker.ensureFirstModelSync();
scheduler.schedule(mTracker, coreName, props);
LOGGER.info("Model Tracker has been correctly initialised, registered and scheduled.");
}
}
/**
* Shuts down the trackers for a core.
*
* The trackers are only deleted from the scheduler if they are the exact same instance of the Tracker class
* passed into this method.
* For example, you could have 2 cores of the same name and have the trackers registered with the scheduler BUT
* the scheduler only keys by core name. The Collection<Tracker>s passed into this method are only removed
* from the scheduler if the instances are == (equal). See scheduler.deleteJobForTrackerInstance()
*
* Trackers are not removed from the registry because the registry only keys by core name; its possible to
* have multiple cores of the same name running. Left over trackers in the registry are cleaned up by the CoreContainer
* shutdown, that happens in the the AlfrescoCoreAdminHandler.shutdown().
*
* @param coreName The name of the core
* @param coreTrackers A collection of trackers
* @param scheduler The scheduler
*/
void shutdownTrackers(String coreName, Collection<Tracker> coreTrackers, SolrTrackerScheduler scheduler)
{
try
{
LOGGER.info("Shutting down Trackers Subsystem for core \"{}\" which contains {} core trackers.", coreName, coreTrackers.size());
// Sets the shutdown flag on the trackers to stop them from doing any more work
coreTrackers.forEach(tracker -> tracker.setShutdown(true));
if (!scheduler.isShutdown())
{
coreTrackers.forEach(tracker -> scheduler.deleteJobForTrackerInstance(coreName, tracker) );
}
coreTrackers.forEach(Tracker::shutdown);
}
catch (Exception e)
{
LOGGER.error("Tracking Subsystem shutdown procedure failed to shutdown trackers for core {}. See the stacktrace below for further details.", coreName, e);
}
}
/**
* Checks if the content store belonging to the hosting Solr node must be set in read only mode.
*
* @param core the hosting {@link SolrCore} instance.
* @return true if the content store must be set in read only mode, false otherwise.
*/
boolean isSlaveModeEnabledFor(SolrCore core)
{
Predicate<PluginInfo> onlyReplicationHandler =
plugin -> "/replication".equals(plugin.name)
|| plugin.className.endsWith(ReplicationHandler.class.getSimpleName());
Function<NamedList, Boolean> isSlaveModeEnabled =
params -> ofNullable(params)
.map(configuration -> {
Object enable = configuration.get("enable");
return enable == null ||
(enable instanceof String ? StrUtils.parseBool((String)enable) : Boolean.TRUE.equals(enable));})
.orElse(false);
return core.getSolrConfig().getPluginInfos(SolrRequestHandler.class.getName())
.stream()
.filter(PluginInfo::isEnabled)
.filter(onlyReplicationHandler)
.findFirst()
.map(plugin -> plugin.initArgs)
.map(params -> params.get("slave"))
.map(NamedList.class::cast)
.map(isSlaveModeEnabled)
.orElse(false);
}
}

View File

@@ -1,258 +0,0 @@
/*
* Copyright (C) 2005-2016 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.solr.lifecycle;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import org.alfresco.opencmis.dictionary.CMISStrictDictionaryService;
import org.alfresco.solr.AlfrescoCoreAdminHandler;
import org.alfresco.solr.AlfrescoSolrDataModel;
import org.alfresco.solr.SolrInformationServer;
import org.alfresco.solr.SolrKeyResourceLoader;
import org.alfresco.solr.client.SOLRAPIClient;
import org.alfresco.solr.client.SOLRAPIClientFactory;
import org.alfresco.solr.content.SolrContentStore;
import org.alfresco.solr.tracker.AclTracker;
import org.alfresco.solr.tracker.CascadeTracker;
import org.alfresco.solr.tracker.CommitTracker;
import org.alfresco.solr.tracker.ContentTracker;
import org.alfresco.solr.tracker.MetadataTracker;
import org.alfresco.solr.tracker.ModelTracker;
import org.alfresco.solr.tracker.SolrTrackerScheduler;
import org.alfresco.solr.tracker.Tracker;
import org.alfresco.solr.tracker.TrackerRegistry;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptorDecorator;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Deals with core registration when the core is loaded.
 *
 * @author Gethin James
 */
public class SolrCoreLoadRegistration {

    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    /**
     * Registers with the admin handler the information server and the trackers.
     *
     * Creates the repository client, content store and information server for the given core; always
     * ensures the (singleton) model tracker exists, and — when "enable.alfresco.tracking" is true —
     * registers and schedules the core trackers plus the commit tracker, installing a close hook that
     * shuts them down when the core is closed.
     *
     * @param adminHandler the Alfresco core admin handler owning registries and scheduler.
     * @param coreContainer the container hosting the core (used for the Solr home folder).
     * @param core the core being registered.
     * @param coreName the name of the core.
     */
    public static void registerForCore(AlfrescoCoreAdminHandler adminHandler, CoreContainer coreContainer, SolrCore core,
                                       String coreName)
    {
        TrackerRegistry trackerRegistry = adminHandler.getTrackerRegistry();
        Properties props = new CoreDescriptorDecorator(core.getCoreDescriptor()).getProperties();

        //Prepare cores
        SolrResourceLoader loader = core.getLatestSchema().getResourceLoader();
        SolrKeyResourceLoader keyResourceLoader = new SolrKeyResourceLoader(loader);
        SOLRAPIClientFactory clientFactory = new SOLRAPIClientFactory();
        SOLRAPIClient repositoryClient = clientFactory.getSOLRAPIClient(props, keyResourceLoader,
                AlfrescoSolrDataModel.getInstance().getDictionaryService(CMISStrictDictionaryService.DEFAULT),
                AlfrescoSolrDataModel.getInstance().getNamespaceDAO());

        //Start content store
        SolrContentStore contentStore = new SolrContentStore(coreContainer.getSolrHome());
        SolrInformationServer srv = new SolrInformationServer(adminHandler, core, repositoryClient, contentStore);
        props.putAll(srv.getProps());
        adminHandler.getInformationServers().put(coreName, srv);

        SolrTrackerScheduler scheduler = adminHandler.getScheduler();

        // Prevents other threads from registering the ModelTracker at the same time
        // Create model tracker and load all the persisted models
        createModelTracker(coreName,
                trackerRegistry,
                props,
                coreContainer.getSolrHome(),
                repositoryClient,
                srv,
                scheduler);

        log.info("Starting to track {}", coreName);
        if (Boolean.parseBoolean(props.getProperty("enable.alfresco.tracking", "false")))
        {
            // A previous registration under the same core name means we are reloading: shut the
            // old tracker instances down before creating fresh ones.
            if (trackerRegistry.hasTrackersForCore(coreName))
            {
                log.info("Trackers for {} is already registered, shutting them down.", coreName);
                shutdownTrackers(coreName, trackerRegistry.getTrackersForCore(coreName), scheduler);
                trackerRegistry.removeTrackersForCore(coreName);
                adminHandler.getInformationServers().remove(coreName);
            }

            List<Tracker> trackers = createCoreTrackers(coreName, trackerRegistry, props, scheduler, repositoryClient, srv);

            CommitTracker commitTracker = new CommitTracker(props, repositoryClient, coreName, srv, trackers);
            trackerRegistry.register(coreName, commitTracker);
            scheduler.schedule(commitTracker, coreName, props);
            log.info("The Trackers are now scheduled to run");

            trackers.add(commitTracker); //Add the commitTracker to the list of scheduled trackers that can be shutdown

            core.addCloseHook(new CloseHook()
            {
                @Override
                public void preClose(SolrCore core)
                {
                    log.info("Shutting down {}", core.getName());
                    SolrCoreLoadRegistration.shutdownTrackers(core.getName(), trackers, scheduler);
                }

                @Override
                public void postClose(SolrCore core)
                {
                    // Nothing to be done here
                }
            });
        }
    }

    /**
     * Creates, registers and schedules the four core trackers (ACL, Content, Metadata, Cascade).
     *
     * @param coreName the name of the owning core.
     * @param trackerRegistry the registry where trackers are recorded, keyed by core name.
     * @param props the core configuration properties.
     * @param scheduler the scheduler that periodically runs the trackers.
     * @param repositoryClient the client used for communicating with the repository.
     * @param srv the information server facade of the owning core.
     * @return A list of trackers, ordered for the CommitTracker lock acquisition.
     */
    private static List<Tracker> createCoreTrackers(String coreName,
                                                    TrackerRegistry trackerRegistry,
                                                    Properties props,
                                                    SolrTrackerScheduler scheduler,
                                                    SOLRAPIClient repositoryClient,
                                                    SolrInformationServer srv) {
        List<Tracker> trackers = new ArrayList<>();

        AclTracker aclTracker = new AclTracker(props, repositoryClient, coreName, srv);
        trackerRegistry.register(coreName, aclTracker);
        scheduler.schedule(aclTracker, coreName, props);

        ContentTracker contentTrkr = new ContentTracker(props, repositoryClient, coreName, srv);
        trackerRegistry.register(coreName, contentTrkr);
        scheduler.schedule(contentTrkr, coreName, props);

        MetadataTracker metaTrkr = new MetadataTracker(props, repositoryClient, coreName, srv);
        trackerRegistry.register(coreName, metaTrkr);
        scheduler.schedule(metaTrkr, coreName, props);

        CascadeTracker cascadeTrkr = new CascadeTracker(props, repositoryClient, coreName, srv);
        trackerRegistry.register(coreName, cascadeTrkr);
        scheduler.schedule(cascadeTrkr, coreName, props);

        //The CommitTracker will acquire these locks in order
        //The ContentTracker will likely have the longest runs so put it first to ensure the MetadataTracker is not paused while
        //waiting for the ContentTracker to release it's lock.
        //The aclTracker will likely have the shortest runs so put it last.
        trackers.add(cascadeTrkr);
        trackers.add(contentTrkr);
        trackers.add(metaTrkr);
        trackers.add(aclTracker);
        return trackers;
    }

    /**
     * Creates the (singleton) model tracker, if it doesn't already exist, and loads persisted models.
     * Creation forces a first model sync before the tracker is scheduled.
     *
     * @param coreName the name of the owning core.
     * @param trackerRegistry the registry where trackers are recorded.
     * @param props the core configuration properties.
     * @param solrHome the Solr home folder.
     * @param repositoryClient the client used for communicating with the repository.
     * @param srv the information server facade of the owning core.
     * @param scheduler the scheduler that periodically runs the trackers.
     */
    private synchronized static void createModelTracker(String coreName,
                                                        TrackerRegistry trackerRegistry,
                                                        Properties props,
                                                        String solrHome,
                                                        SOLRAPIClient repositoryClient,
                                                        SolrInformationServer srv,
                                                        SolrTrackerScheduler scheduler)
    {
        ModelTracker mTracker = trackerRegistry.getModelTracker();
        if (mTracker == null)
        {
            log.debug("Creating ModelTracker");
            mTracker = new ModelTracker(solrHome, props, repositoryClient,
                    coreName, srv);
            trackerRegistry.setModelTracker(mTracker);

            log.info("Ensuring first model sync.");
            mTracker.ensureFirstModelSync();
            log.info("Done ensuring first model sync.");

            //Scheduling the ModelTracker.
            scheduler.schedule(mTracker, coreName, props);
        }
    }

    /**
     * Shuts down the trackers for a core.
     *
     * The trackers are only deleted from the scheduler if they are the exact same instance of the Tracker class
     * passed into this method.
     * For example, you could have 2 cores of the same name and have the trackers registered with the scheduler BUT
     * the scheduler only keys by core name. The Collection<Tracker>s passed into this method are only removed
     * from the scheduler if the instances are == (equal). See scheduler.deleteJobForTrackerInstance()
     *
     * Trackers are not removed from the registry because the registry only keys by core name; it's possible to
     * have multiple cores of the same name running. Left over trackers in the registry are cleaned up by the
     * CoreContainer shutdown, that happens in the AlfrescoCoreAdminHandler.shutdown().
     *
     * @param coreName The name of the core
     * @param coreTrackers A collection of trackers
     * @param scheduler The scheduler
     */
    public static void shutdownTrackers(String coreName, Collection<Tracker> coreTrackers, SolrTrackerScheduler scheduler)
    {
        try
        {
            log.info("Shutting down {} with {} trackers.", coreName, coreTrackers.size());

            // Sets the shutdown flag on the trackers to stop them from doing any more work
            coreTrackers.forEach(tracker -> tracker.setShutdown(true));

            if (!scheduler.isShutdown())
            {
                coreTrackers.forEach(tracker -> scheduler.deleteJobForTrackerInstance(coreName, tracker));
            }

            coreTrackers.forEach(Tracker::shutdown);
        }
        catch (Exception e)
        {
            log.error("Failed to shutdown trackers for core {}", coreName, e);
        }
    }
}

View File

@@ -327,15 +327,17 @@ public abstract class AbstractTracker implements Tracker
throw new IndexTrackingShutdownException();
}
}
/**
 * Sets the shutdown flag on this tracker.
 *
 * Setting it to {@code true} signals the tracker to stop doing any further work; the flag is
 * checked by the tracking loop (see the enclosing AbstractTracker).
 *
 * @param shutdown the new value of the shutdown flag.
 */
@Override
public void setShutdown(boolean shutdown)
{
this.shutdown = shutdown;
}
@Override
public void shutdown()
{
log.warn("Core "+ coreName+" shutdown called on tracker. " + getClass().getSimpleName() + " " + hashCode());
log.warn("Core " + coreName + " shutdown called on tracker. " + getClass().getSimpleName() + " " + hashCode());
setShutdown(true);
if(this.threadHandler != null)
{

View File

@@ -115,12 +115,16 @@ public class ModelTracker extends AbstractTracker implements Tracker
loadPersistedModels();
}
public boolean hasMaintenance() {
@Override
public boolean hasMaintenance()
{
return false;
}
public void maintenance() {
@Override
public void maintenance()
{
// Nothing to be done here
}
/**
@@ -203,7 +207,7 @@ public class ModelTracker extends AbstractTracker implements Tracker
public void trackModels(boolean onlyFirstTime) throws AuthenticationException, IOException, JSONException
{
boolean requiresWriteLock = false;
boolean requiresWriteLock;
modelLock.readLock().lock();
try
{
@@ -271,10 +275,6 @@ public class ModelTracker extends AbstractTracker implements Tracker
/**
* Tracks models. Reflects changes and updates on disk copy
*
* @throws AuthenticationException
* @throws IOException
* @throws JSONException
*/
private void trackModelsImpl() throws AuthenticationException, IOException, JSONException
{
@@ -480,16 +480,10 @@ public class ModelTracker extends AbstractTracker implements Tracker
}
return expandedQName;
}
/**
* @param alfrescoModelDir
* @param modelName
*/
private void removeMatchingModels(File alfrescoModelDir, QName modelName)
{
final String prefix = modelName.toPrefixString(this.infoSrv.getNamespaceDAO()).replace(":", ".") + ".";
final String postFix = ".xml";

View File

@@ -0,0 +1,170 @@
/*
* Copyright (C) 2005-2013 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.solr.lifecycle;
import org.alfresco.solr.SolrInformationServer;
import org.alfresco.solr.client.SOLRAPIClient;
import org.alfresco.solr.tracker.AclTracker;
import org.alfresco.solr.tracker.CascadeTracker;
import org.alfresco.solr.tracker.ContentTracker;
import org.alfresco.solr.tracker.MetadataTracker;
import org.alfresco.solr.tracker.SolrTrackerScheduler;
import org.alfresco.solr.tracker.Tracker;
import org.alfresco.solr.tracker.TrackerRegistry;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.xml.sax.InputSource;
import java.util.List;
import java.util.Properties;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Unit tests for the {@link SolrCoreLoadListener}.
*
* @author Andrea Gazzarini
* @since 1.5
*/
@RunWith(MockitoJUnitRunner.class)
public class SolrCoreLoadListenerTest
{
// Class under test, created in setUp() around the mocked core.
private SolrCoreLoadListener listener;
@Mock
private SolrCore core;
@Mock
private SolrTrackerScheduler scheduler;
@Mock
private SOLRAPIClient api;
@Mock
private SolrInformationServer informationServer;
@Mock
private TrackerRegistry registry;
// Fresh, empty core configuration for each test.
private Properties coreProperties;
// Arbitrary core name returned by the mocked core.
private String coreName = "XYZ";
@Before
public void setUp()
{
listener = new SolrCoreLoadListener(core);
when(core.getName()).thenReturn(coreName);
coreProperties = new Properties();
}
// Verifies that createCoreTrackers registers and schedules exactly the four core trackers
// (ACL, Content, Metadata, Cascade) and returns them all.
@Test
public void coreTrackersRegistrationAndScheduling()
{
List<Tracker> coreTrackers = listener.createCoreTrackers(core.getName(), registry, coreProperties, scheduler, api, informationServer);
verify(registry).register(eq(coreName), any(AclTracker.class));
verify(registry).register(eq(coreName), any(ContentTracker.class));
verify(registry).register(eq(coreName), any(MetadataTracker.class));
verify(registry).register(eq(coreName), any(CascadeTracker.class));
verify(scheduler).schedule(any(AclTracker.class), eq(coreName), same(coreProperties));
verify(scheduler).schedule(any(ContentTracker.class), eq(coreName), same(coreProperties));
verify(scheduler).schedule(any(MetadataTracker.class), eq(coreName), same(coreProperties));
verify(scheduler).schedule(any(CascadeTracker.class), eq(coreName), same(coreProperties));
assertEquals(4, coreTrackers.size());
}
// Verifies the shutdown sequence applied to every tracker: set the shutdown flag, remove the
// scheduled job for the exact tracker instance, then shut the tracker down.
@Test
public void trackersShutDownProcedure()
{
List<Tracker> coreTrackers =
asList(mock(AclTracker.class), mock(ContentTracker.class), mock(MetadataTracker.class), mock(CascadeTracker.class));
listener.shutdownTrackers(coreName, coreTrackers, scheduler);
coreTrackers.forEach(tracker -> verify(tracker).setShutdown(true));
coreTrackers.forEach(tracker -> verify(scheduler).deleteJobForTrackerInstance(core.getName(), tracker));
coreTrackers.forEach(tracker -> verify(tracker).shutdown());
}
// The following tests feed isSlaveModeEnabledFor(core) with different solrconfig fixtures and
// assert whether the core is detected as a slave (read-only content store) or not.
@Test
public void noReplicationHandlerDefined_thenContentStoreIsInReadWriteMode() throws Exception
{
prepare("solrconfig_no_replication_handler_defined.xml");
assertFalse("If no replication handler is defined, then we expect to run a RW content store.", listener.isSlaveModeEnabledFor(core));
}
@Test
public void emptyReplicationHandlerDefined_thenContentStoreIsInReadWriteMode() throws Exception
{
prepare("solrconfig_empty_replication_handler.xml");
assertFalse("If an empty replication handler is defined, then we expect to run a RW content store.", listener.isSlaveModeEnabledFor(core));
}
@Test
public void slaveReplicationHandlerDefinedButDisabled_thenContentStoreIsInReadWriteMode() throws Exception
{
prepare("solrconfig_slave_disabled_replication_handler.xml");
assertFalse("If a slave replication handler is defined but disabled, then we expect to run a RW content store.", listener.isSlaveModeEnabledFor(core));
}
@Test
public void masterReplicationHandlerDefined_thenContentStoreIsInReadWriteMode() throws Exception
{
prepare("solrconfig_master_replication_handler.xml");
assertFalse("If a master replication handler is defined but disabled, then we expect to run a RW content store.", listener.isSlaveModeEnabledFor(core));
}
@Test
public void masterReplicationHandlerDefinedButDisabled_thenContentStoreIsInReadWriteMode() throws Exception
{
prepare("solrconfig_master_disabled_replication_handler.xml");
assertFalse("If a master replication handler is defined but disabled, then we expect to run a RW content store.", listener.isSlaveModeEnabledFor(core));
}
@Test
public void slaveReplicationHandlerDefined_thenContentStoreIsInReadOnlyMode() throws Exception
{
prepare("solrconfig_slave_replication_handler.xml");
assertTrue("If a slave replication handler is defined, then we expect to run a RO content store.", listener.isSlaveModeEnabledFor(core));
}
// Loads the given solrconfig fixture from test resources and stubs the mocked core to return it.
private void prepare(String configName) throws Exception
{
SolrConfig solrConfig = new SolrConfig(configName, new InputSource(getClass().getResourceAsStream("/test-files/" + configName)));
when(core.getSolrConfig()).thenReturn(solrConfig);
}
}

View File

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8" ?>
<config>
<luceneMatchVersion>6.6.5</luceneMatchVersion>
<dataDir>/tmp/test</dataDir>
<schemaFactory class="ClassicIndexSchemaFactory" />
<indexConfig>
<lockType>${solr.lock.type:native}</lockType>
</indexConfig>
<requestDispatcher handleSelect="true">
<requestParsers
enableRemoteStreaming="true"
multipartUploadLimitInKB="2048000"
formdataUploadLimitInKB="2048" />
<httpCaching never304="true" />
</requestDispatcher>
<requestHandler name="/replication" class="io.sease.labs.solr.handler.ReplicationHandler">
<lst name="master">
<str name="replicateAfter">commit</str>
<str name="confFiles">schema.xml</str>
</lst>
</requestHandler>
<requestHandler name="/def" class="solr.SearchHandler" default="true">
<lst name="defaults">
<bool name="sow">false</bool>
<str name="df">id</str>
<str name="defType">lucene</str>
</lst>
</requestHandler>
<admin>
<defaultQuery>Query me!</defaultQuery>
</admin>
</config>

View File

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8" ?>
<config>
<luceneMatchVersion>6.6.5</luceneMatchVersion>
<dataDir>/tmp/test</dataDir>
<schemaFactory class="ClassicIndexSchemaFactory" />
<indexConfig>
<lockType>${solr.lock.type:native}</lockType>
</indexConfig>
<requestDispatcher handleSelect="true">
<requestParsers
enableRemoteStreaming="true"
multipartUploadLimitInKB="2048000"
formdataUploadLimitInKB="2048" />
<httpCaching never304="true" />
</requestDispatcher>
<requestHandler name="/replication" class="io.sease.labs.solr.handler.ReplicationHandler">
<lst name="master">
<str name="enable">false</str>
<str name="replicateAfter">commit</str>
<str name="confFiles">schema.xml</str>
</lst>
</requestHandler>
<requestHandler name="/def" class="solr.SearchHandler" default="true">
<lst name="defaults">
<bool name="sow">false</bool>
<str name="df">id</str>
<str name="defType">lucene</str>
</lst>
</requestHandler>
<admin>
<defaultQuery>Query me!</defaultQuery>
</admin>
</config>

View File

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8" ?>
<config>
<luceneMatchVersion>6.6.5</luceneMatchVersion>
<dataDir>/tmp/test</dataDir>
<schemaFactory class="ClassicIndexSchemaFactory" />
<indexConfig>
<lockType>${solr.lock.type:native}</lockType>
</indexConfig>
<requestDispatcher handleSelect="true">
<requestParsers
enableRemoteStreaming="true"
multipartUploadLimitInKB="2048000"
formdataUploadLimitInKB="2048" />
<httpCaching never304="true" />
</requestDispatcher>
<requestHandler name="/replication" class="io.sease.labs.solr.handler.ReplicationHandler">
<lst name="master">
<str name="replicateAfter">commit</str>
<str name="confFiles">schema.xml</str>
</lst>
</requestHandler>
<requestHandler name="/def" class="solr.SearchHandler" default="true">
<lst name="defaults">
<bool name="sow">false</bool>
<str name="df">id</str>
<str name="defType">lucene</str>
</lst>
</requestHandler>
<admin>
<defaultQuery>Query me!</defaultQuery>
</admin>
</config>

View File

@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8" ?>
<config>
<luceneMatchVersion>6.6.5</luceneMatchVersion>
<dataDir>/tmp/test</dataDir>
<schemaFactory class="ClassicIndexSchemaFactory" />
<indexConfig>
<lockType>${solr.lock.type:native}</lockType>
</indexConfig>
<requestDispatcher handleSelect="true">
<requestParsers
enableRemoteStreaming="true"
multipartUploadLimitInKB="2048000"
formdataUploadLimitInKB="2048" />
<httpCaching never304="true" />
</requestDispatcher>
<requestHandler name="/def" class="solr.SearchHandler" default="true">
<lst name="defaults">
<bool name="sow">false</bool>
<str name="df">id</str>
<str name="defType">lucene</str>
</lst>
</requestHandler>
<admin>
<defaultQuery>Query me!</defaultQuery>
</admin>
</config>

View File

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8" ?>
<config>
<luceneMatchVersion>6.6.5</luceneMatchVersion>
<dataDir>/tmp/test</dataDir>
<schemaFactory class="ClassicIndexSchemaFactory" />
<indexConfig>
<lockType>${solr.lock.type:native}</lockType>
</indexConfig>
<requestDispatcher handleSelect="true">
<requestParsers
enableRemoteStreaming="true"
multipartUploadLimitInKB="2048000"
formdataUploadLimitInKB="2048" />
<httpCaching never304="true" />
</requestDispatcher>
<requestHandler name="/replication" class="solr.ReplicationHandler">
<lst name="slave">
<str name="enable">false</str>
<str name="masterUrl">http://localhost:8983/solr/alfresco</str>
<str name="pollInterval">00:00:20</str>
</lst>
</requestHandler>
<requestHandler name="/def" class="solr.SearchHandler" default="true">
<lst name="defaults">
<bool name="sow">false</bool>
<str name="df">id</str>
<str name="defType">lucene</str>
</lst>
</requestHandler>
<admin>
<defaultQuery>Query me!</defaultQuery>
</admin>
</config>

View File

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8" ?>
<config>
<luceneMatchVersion>6.6.5</luceneMatchVersion>
<dataDir>/tmp/test</dataDir>
<schemaFactory class="ClassicIndexSchemaFactory" />
<indexConfig>
<lockType>${solr.lock.type:native}</lockType>
</indexConfig>
<requestDispatcher handleSelect="true">
<requestParsers
enableRemoteStreaming="true"
multipartUploadLimitInKB="2048000"
formdataUploadLimitInKB="2048" />
<httpCaching never304="true" />
</requestDispatcher>
<requestHandler name="/replication" class="solr.ReplicationHandler">
<lst name="slave">
<str name="masterUrl">http://localhost:8983/solr/alfresco</str>
<str name="pollInterval">00:00:20</str>
</lst>
</requestHandler>
<requestHandler name="/def" class="solr.SearchHandler" default="true">
<lst name="defaults">
<bool name="sow">false</bool>
<str name="df">id</str>
<str name="defType">lucene</str>
</lst>
</requestHandler>
<admin>
<defaultQuery>Query me!</defaultQuery>
</admin>
</config>

View File

@@ -22,7 +22,7 @@
</distributionManagement>
<properties>
<dependency.alfresco-data-model.version>8.52</dependency.alfresco-data-model.version>
<dependency.alfresco-data-model.version>8.53</dependency.alfresco-data-model.version>
<dependency.jackson.version>2.10.0</dependency.jackson.version>
</properties>