Merge branch 'feature/SEARCH-1949_ConfigOptionForPathQueries' into 'master'

Feature/search 1949 config option for path queries

See merge request search_discovery/insightengine!343
This commit is contained in:
Tom Page
2020-01-28 09:54:27 +00:00
12 changed files with 239 additions and 127 deletions

View File

@@ -31,7 +31,7 @@ import static org.testng.Assert.assertTrue;
* @author Alessandro Benedetti
* @author Meenal Bhave
*/
public class CascadingTrackerIntegrationTest extends AbstractE2EFunctionalTest
public class CascadingIntegrationTest extends AbstractE2EFunctionalTest
{
@Autowired
protected DataContent dataContent;

View File

@@ -523,7 +523,7 @@ public class AlfrescoSolrDataModel implements QueryConstants
}
catch (IOException e)
{
log.info("Failed to read shared properties fat " + propertiesFile.getAbsolutePath());
log.info("Failed to read shared properties at " + propertiesFile.getAbsolutePath());
}
return props;

View File

@@ -88,7 +88,7 @@ public interface InformationServer extends InformationServerCollectionProvider
void indexNode(Node node, boolean overwrite) throws IOException, AuthenticationException, JSONException;
void indexNodes(List<Node> nodes, boolean overwrite, boolean cascade) throws IOException, AuthenticationException, JSONException;
void indexNodes(List<Node> nodes, boolean overwrite) throws IOException, AuthenticationException, JSONException;
void cascadeNodes(List<NodeMetaData> nodes, boolean overwrite) throws IOException, AuthenticationException, JSONException;
@@ -181,4 +181,11 @@ public interface InformationServer extends InformationServerCollectionProvider
String getBaseUrl();
void flushContentStore() throws IOException;
/**
* Check if cascade tracking is enabled.
*
* @return true if cascade tracking is enabled (note that this is the default behaviour if not specified in the properties file).
*/
boolean cascadeTrackingEnabled();
}

View File

@@ -19,6 +19,7 @@
package org.alfresco.solr;
import static java.util.Optional.ofNullable;
import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_ACLID;
import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_ACLTXCOMMITTIME;
import static org.alfresco.repo.search.adaptor.lucene.QueryConstants.FIELD_ACLTXID;
@@ -76,17 +77,32 @@ import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import com.carrotsearch.hppc.IntArrayList;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.cursors.LongCursor;
import org.alfresco.httpclient.AuthenticationException;
import org.alfresco.model.ContentModel;
import org.alfresco.opencmis.dictionary.CMISStrictDictionaryService;
@@ -142,7 +158,19 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
@@ -167,8 +195,6 @@ import org.apache.solr.search.DelegatingCollector;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QueryCommand;
import org.apache.solr.search.QueryResult;
import org.apache.solr.search.QueryWrapperFilter;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.AddUpdateCommand;
@@ -205,10 +231,7 @@ public class SolrInformationServer implements InformationServer
public static final String AND = " AND ";
public static final String OR = " OR ";
//public static final String REQUEST_HANDLER_ALFRESCO_FULL_TEXT_SEARCH = "/afts";
private static final String REQUEST_HANDLER_NATIVE = "/native";
//public static final String REQUEST_HANDLER_ALFRESCO = "/alfresco";
//public static final String REQUEST_HANDLER_SELECT = "/select";
static final String REQUEST_HANDLER_GET = "/get";
private static final String RESPONSE_DEFAULT_IDS = "response";
static final String RESPONSE_DEFAULT_ID = "doc";
@@ -232,6 +255,8 @@ public class SolrInformationServer implements InformationServer
*/
private static final int BATCH_FACET_TXS = 4096;
private static final String FINGERPRINT_FIELD = "MINHASH";
/** Shared property to determine if the cascade tracking is enabled. */
public static final String CASCADE_TRACKER_ENABLED = "alfresco.cascade.tracker.enabled";
private final AlfrescoCoreAdminHandler adminHandler;
private final SolrCore core;
@@ -456,6 +481,14 @@ public class SolrInformationServer implements InformationServer
return this.adminHandler;
}
/**
 * {@inheritDoc}
 *
 * Reads the shared property {@code alfresco.cascade.tracker.enabled}
 * ({@link #CASCADE_TRACKER_ENABLED}). Cascade tracking defaults to enabled
 * when the property is absent from the shared properties file.
 *
 * @return {@code true} if cascade tracking is enabled.
 */
@Override
public boolean cascadeTrackingEnabled()
{
// parseBoolean returns the primitive directly (no Boolean boxing as with Boolean.valueOf).
// The explicit "true" default is still required: parseBoolean(null) would yield false,
// but a missing property must mean "enabled".
return Boolean.parseBoolean(ofNullable((String) props.get(CASCADE_TRACKER_ENABLED)).orElse("true"));
}
@Override
public synchronized void initSkippingDescendantDocs()
{
@@ -1371,7 +1404,10 @@ public class SolrInformationServer implements InformationServer
public void dirtyTransaction(long txnId)
{
this.cleanContentCache.remove(txnId);
this.cleanCascadeCache.remove(txnId);
if (cascadeTrackingEnabled())
{
this.cleanCascadeCache.remove(txnId);
}
}
@Override
@@ -1460,9 +1496,9 @@ public class SolrInformationServer implements InformationServer
long start = System.nanoTime();
if ((node.getStatus() == SolrApiNodeStatus.DELETED)
|| (node.getStatus() == SolrApiNodeStatus.NON_SHARD_DELETED)
|| (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)
|| (node.getStatus() == SolrApiNodeStatus.UNKNOWN))
|| (node.getStatus() == SolrApiNodeStatus.UNKNOWN)
|| cascadeTrackingEnabled() && ((node.getStatus() == SolrApiNodeStatus.NON_SHARD_DELETED)
|| (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)))
{
// fix up any secondary paths
NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
@@ -1470,8 +1506,8 @@ public class SolrInformationServer implements InformationServer
nmdp.setToNodeId(node.getId());
List<NodeMetaData> nodeMetaDatas;
if ((node.getStatus() == SolrApiNodeStatus.DELETED)
|| (node.getStatus() == SolrApiNodeStatus.NON_SHARD_DELETED)
|| (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED))
|| cascadeTrackingEnabled() && ((node.getStatus() == SolrApiNodeStatus.NON_SHARD_DELETED)
|| (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)))
{
// Fake the empty node metadata for this parent deleted node
NodeMetaData nodeMetaData = createDeletedNodeMetaData(node);
@@ -1508,10 +1544,9 @@ public class SolrInformationServer implements InformationServer
deleteNode(processor, request, node);
}
if ((node.getStatus() == SolrApiNodeStatus.UPDATED)
|| (node.getStatus() == SolrApiNodeStatus.UNKNOWN)
|| (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED))
if (node.getStatus() == SolrApiNodeStatus.UPDATED
|| node.getStatus() == SolrApiNodeStatus.UNKNOWN
|| (cascadeTrackingEnabled() && node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED))
{
long nodeId = node.getId();
@@ -1785,7 +1820,7 @@ public class SolrInformationServer implements InformationServer
@Override
public void indexNodes(List<Node> nodes, boolean overwrite, boolean cascade) throws IOException, JSONException
public void indexNodes(List<Node> nodes, boolean overwrite) throws IOException, JSONException
{
UpdateRequestProcessor processor = null;
try (SolrQueryRequest request = newSolrQueryRequest())
@@ -1796,8 +1831,13 @@ public class SolrInformationServer implements InformationServer
EnumMap<SolrApiNodeStatus, List<Long>> nodeStatusToNodeIds = new EnumMap<>(SolrApiNodeStatus.class);
categorizeNodes(nodes, nodeIdsToNodes, nodeStatusToNodeIds);
List<Long> deletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.DELETED));
List<Long> shardDeletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_DELETED));
List<Long> shardUpdatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_UPDATED));
List<Long> shardDeletedNodeIds = Collections.emptyList();
List<Long> shardUpdatedNodeIds = Collections.emptyList();
if (cascadeTrackingEnabled())
{
shardDeletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_DELETED));
shardUpdatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_UPDATED));
}
List<Long> unknownNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UNKNOWN));
List<Long> updatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UPDATED));
@@ -1881,7 +1921,7 @@ public class SolrInformationServer implements InformationServer
continue;
}
if (nodeIdsToNodes.get(nodeMetaData.getId()).getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)
if (cascadeTrackingEnabled() && nodeIdsToNodes.get(nodeMetaData.getId()).getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)
{
if (nodeMetaData.getProperties().get(ContentModel.PROP_CASCADE_TX) != null)
{
@@ -2629,7 +2669,10 @@ public class SolrInformationServer implements InformationServer
input.addField(FIELD_INTXID, txn.getId());
input.addField(FIELD_TXCOMMITTIME, txn.getCommitTimeMs());
input.addField(FIELD_DOC_TYPE, DOC_TYPE_TX);
input.addField(FIELD_CASCADE_FLAG, 0);
if (cascadeTrackingEnabled())
{
input.addField(FIELD_CASCADE_FLAG, 0);
}
cmd.solrDoc = input;
processor.processAdd(cmd);
}
@@ -2670,8 +2713,11 @@ public class SolrInformationServer implements InformationServer
input.addField(FIELD_S_TXID, info.getId());
input.addField(FIELD_S_TXCOMMITTIME, info.getCommitTimeMs());
//Set the cascade flag to 1. This means cascading updates have not been done yet.
input.addField(FIELD_CASCADE_FLAG, 1);
if (cascadeTrackingEnabled())
{
//Set the cascade flag to 1. This means cascading updates have not been done yet.
input.addField(FIELD_CASCADE_FLAG, 1);
}
cmd.solrDoc = input;
processor.processAdd(cmd);
@@ -2943,13 +2989,13 @@ public class SolrInformationServer implements InformationServer
if(batch.size() >= 200)
{
indexNodes(batch, true, true);
indexNodes(batch, true);
batch.clear();
}
}
if(batch.size() > 0)
{
indexNodes(batch, true, true);
indexNodes(batch, true);
batch.clear();
}
}

View File

@@ -21,6 +21,15 @@ package org.alfresco.solr.lifecycle;
import static java.util.Arrays.asList;
import static java.util.Optional.ofNullable;
import static org.alfresco.solr.SolrInformationServer.CASCADE_TRACKER_ENABLED;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.function.Function;
import java.util.function.Predicate;
import org.alfresco.opencmis.dictionary.CMISStrictDictionaryService;
import org.alfresco.solr.AlfrescoCoreAdminHandler;
import org.alfresco.solr.AlfrescoSolrDataModel;
@@ -54,13 +63,6 @@ import org.apache.solr.search.SolrIndexSearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.function.Function;
import java.util.function.Predicate;
/**
* Listeners for *FIRST SEARCHER* events in order to prepare and register the SolrContentStore and the Tracking Subsystem.
*
@@ -257,19 +259,27 @@ public class SolrCoreLoadListener extends AbstractSolrEventListener
trackerRegistry,
scheduler);
CascadeTracker cascadeTracker =
registerAndSchedule(
new CascadeTracker(props, repositoryClient, core.getName(), srv),
core,
props,
trackerRegistry,
scheduler);
List<Tracker> trackers = new ArrayList<>();
String cascadeTrackerEnabledProp = ofNullable((String) props.get(CASCADE_TRACKER_ENABLED)).orElse("true");
if (Boolean.valueOf(cascadeTrackerEnabledProp))
{
CascadeTracker cascadeTracker =
registerAndSchedule(
new CascadeTracker(props, repositoryClient, core.getName(), srv),
core,
props,
trackerRegistry,
scheduler);
trackers.add(cascadeTracker);
}
//The CommitTracker will acquire these locks in order
//The ContentTracker will likely have the longest runs so put it first to ensure the MetadataTracker is not paused while
//waiting for the ContentTracker to release its lock.
//The aclTracker will likely have the shortest runs so put it last.
return asList(cascadeTracker, contentTracker, metadataTracker, aclTracker);
trackers.addAll(asList(contentTracker, metadataTracker, aclTracker));
return trackers;
}
/**

View File

@@ -19,7 +19,11 @@
package org.alfresco.solr.tracker;
import static java.util.Optional.empty;
import static java.util.Optional.ofNullable;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
@@ -41,7 +45,8 @@ public class CommitTracker extends AbstractTracker
private MetadataTracker metadataTracker;
private AclTracker aclTracker;
private ContentTracker contentTracker;
private CascadeTracker cascadeTracker;
/** The cascade tracker. Note that this may be empty if cascade tracking is disabled. */
private Optional<CascadeTracker> cascadeTracker = empty();
private AtomicInteger rollbackCount = new AtomicInteger(0);
protected final static Logger log = LoggerFactory.getLogger(CommitTracker.class);
@@ -71,7 +76,7 @@ public class CommitTracker extends AbstractTracker
} else if(tracker instanceof ContentTracker) {
this.contentTracker = (ContentTracker)tracker;
} else if(tracker instanceof CascadeTracker) {
this.cascadeTracker = (CascadeTracker)tracker;
this.cascadeTracker = ofNullable((CascadeTracker) tracker);
}
}
@@ -182,8 +187,11 @@ public class CommitTracker extends AbstractTracker
contentTracker.getWriteLock().acquire();
assert(contentTracker.getWriteLock().availablePermits() == 0);
cascadeTracker.getWriteLock().acquire();
assert(cascadeTracker.getWriteLock().availablePermits() == 0);
if (cascadeTracker.isPresent())
{
cascadeTracker.get().getWriteLock().acquire();
assert (cascadeTracker.get().getWriteLock().availablePermits() == 0);
}
infoSrv.rollback();
}
@@ -206,12 +214,12 @@ public class CommitTracker extends AbstractTracker
contentTracker.invalidateState();
//Reset cascadeTracker
cascadeTracker.setRollback(false);
cascadeTracker.invalidateState();
cascadeTracker.ifPresent(c -> c.setRollback(false));
cascadeTracker.ifPresent(c -> invalidateState());
//Release the locks
contentTracker.getWriteLock().release();
cascadeTracker.getWriteLock().release();
cascadeTracker.ifPresent(c -> c.getWriteLock().release());
rollbackCount.incrementAndGet();
}

View File

@@ -32,6 +32,7 @@ import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.solr.BoundedDeque;
import org.alfresco.solr.InformationServer;
import org.alfresco.solr.NodeReport;
import org.alfresco.solr.SolrInformationServer;
import org.alfresco.solr.TrackerState;
import org.alfresco.solr.adapters.IOpenBitSet;
import org.alfresco.solr.client.GetNodesParameters;
@@ -83,6 +84,8 @@ public class MetadataTracker extends CoreStatePublisher implements Tracker
* {@link org.alfresco.solr.client.SOLRAPIClient#GET_TX_INTERVAL_COMMIT_TIME}
*/
private boolean txIntervalCommitTimeServiceAvailable = false;
/** Whether the cascade tracking is enabled. */
private boolean cascadeTrackerEnabled = true;
public MetadataTracker(final boolean isMaster, Properties p, SOLRAPIClient client, String coreName,
InformationServer informationServer)
@@ -107,6 +110,7 @@ public class MetadataTracker extends CoreStatePublisher implements Tracker
transactionDocsBatchSize = Integer.parseInt(p.getProperty("alfresco.transactionDocsBatchSize", "100"));
nodeBatchSize = Integer.parseInt(p.getProperty("alfresco.nodeBatchSize", "10"));
threadHandler = new ThreadHandler(p, coreName, "MetadataTracker");
cascadeTrackerEnabled = informationServer.cascadeTrackingEnabled();
// In order to apply performance optimizations, checking the availability of Repo Web Scripts is required.
// As these services are available from ACS 6.2
@@ -957,7 +961,7 @@ public class MetadataTracker extends CoreStatePublisher implements Tracker
List<Node> filteredNodes = filterNodes(nodes);
if(filteredNodes.size() > 0)
{
this.infoServer.indexNodes(filteredNodes, true, false);
this.infoServer.indexNodes(filteredNodes, true);
}
}
@@ -977,9 +981,8 @@ public class MetadataTracker extends CoreStatePublisher implements Tracker
{
filteredList.add(node);
}
else
else if (cascadeTrackerEnabled)
{
if(node.getStatus() == SolrApiNodeStatus.UPDATED)
{
Node doCascade = new Node();

View File

@@ -31,3 +31,6 @@ alfresco.cross.locale.property.1={http://www.alfresco.org/model/content/1.0}lock
# alfresco.cross.locale.datatype.2={http://www.alfresco.org/model/dictionary/1.0}mltext
alfresco.model.tracker.cron=0/10 * * * * ? *
# Whether cascade tracking is enabled. Cascade tracking is required for path queries;
# disabling it also disables path query support.
alfresco.cascade.tracker.enabled=true

View File

@@ -18,6 +18,30 @@
*/
package org.alfresco.solr.lifecycle;
import static java.util.Arrays.asList;
import static org.alfresco.solr.SolrInformationServer.CASCADE_TRACKER_ENABLED;
import static org.alfresco.solr.tracker.Tracker.Type.ACL;
import static org.alfresco.solr.tracker.Tracker.Type.CASCADE;
import static org.alfresco.solr.tracker.Tracker.Type.CONTENT;
import static org.alfresco.solr.tracker.Tracker.Type.METADATA;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import org.alfresco.solr.SolrInformationServer;
import org.alfresco.solr.client.SOLRAPIClient;
import org.alfresco.solr.tracker.AclTracker;
@@ -26,6 +50,7 @@ import org.alfresco.solr.tracker.ContentTracker;
import org.alfresco.solr.tracker.MetadataTracker;
import org.alfresco.solr.tracker.SolrTrackerScheduler;
import org.alfresco.solr.tracker.Tracker;
import org.alfresco.solr.tracker.Tracker.Type;
import org.alfresco.solr.tracker.TrackerRegistry;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
@@ -36,20 +61,6 @@ import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.xml.sax.InputSource;
import java.util.List;
import java.util.Properties;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Unit tests for the {@link SolrCoreLoadListener}.
*
@@ -83,6 +94,8 @@ public class SolrCoreLoadListenerTest
@Before
public void setUp()
{
initMocks(this);
listener = new SolrCoreLoadListener(core);
when(core.getName()).thenReturn(coreName);
@@ -104,7 +117,29 @@ public class SolrCoreLoadListenerTest
verify(scheduler).schedule(any(MetadataTracker.class), eq(coreName), same(coreProperties));
verify(scheduler).schedule(any(CascadeTracker.class), eq(coreName), same(coreProperties));
assertEquals(4, coreTrackers.size());
Set<Type> trackerTypes = coreTrackers.stream().map(Tracker::getType).collect(Collectors.toSet());
assertEquals("Unexpected trackers found.", Set.of(ACL, CONTENT, METADATA, CASCADE), trackerTypes);
}
@Test
public void testDisabledCascadeTracking()
{
// Disable cascade tracking via the shared core property before the trackers are created.
coreProperties.put(CASCADE_TRACKER_ENABLED, "false");
List<Tracker> coreTrackers = listener.createAndScheduleCoreTrackers(core, registry, coreProperties, scheduler, api, informationServer);
// The ACL, content and metadata trackers must still be registered as usual...
verify(registry).register(eq(coreName), any(AclTracker.class));
verify(registry).register(eq(coreName), any(ContentTracker.class));
verify(registry).register(eq(coreName), any(MetadataTracker.class));
// ...but the cascade tracker must never be registered when the property is false.
verify(registry, never()).register(eq(coreName), any(CascadeTracker.class));
// Same expectation for scheduling: all trackers except the cascade tracker.
verify(scheduler).schedule(any(AclTracker.class), eq(coreName), same(coreProperties));
verify(scheduler).schedule(any(ContentTracker.class), eq(coreName), same(coreProperties));
verify(scheduler).schedule(any(MetadataTracker.class), eq(coreName), same(coreProperties));
verify(scheduler, never()).schedule(any(CascadeTracker.class), eq(coreName), same(coreProperties));
// The returned tracker list must contain exactly the three enabled tracker types (no CASCADE).
Set<Type> trackerTypes = coreTrackers.stream().map(Tracker::getType).collect(Collectors.toSet());
assertEquals("Unexpected trackers found.", Set.of(ACL, CONTENT, METADATA), trackerTypes);
}
@Test

View File

@@ -52,7 +52,7 @@ import java.util.List;
@LuceneTestCase.SuppressCodecs({"Appending","Lucene3x","Lucene40","Lucene41","Lucene42","Lucene43", "Lucene44", "Lucene45","Lucene46","Lucene47","Lucene48","Lucene49"})
@SolrTestCaseJ4.SuppressSSL
public class CascadeTrackerIT extends AbstractAlfrescoSolrIT
public class CascadingIT extends AbstractAlfrescoSolrIT
{
private static long MAX_WAIT_TIME = 80000;

View File

@@ -55,7 +55,7 @@ import java.util.Properties;
*/
@SolrTestCaseJ4.SuppressSSL
@LuceneTestCase.SuppressCodecs({"Appending","Lucene3x","Lucene40","Lucene41","Lucene42","Lucene43", "Lucene44", "Lucene45","Lucene46","Lucene47","Lucene48","Lucene49"})
public class DistributedCascadeTrackerIT extends AbstractAlfrescoDistributedIT
public class DistributedCascadeIT extends AbstractAlfrescoDistributedIT
{
private Node parentFolder;
private NodeMetaData parentFolderMetadata;

View File

@@ -120,7 +120,7 @@ public class MetadataTrackerTest
this.metadataTracker.doTrack();
InOrder inOrder = inOrder(srv);
inOrder.verify(srv).indexNodes(nodes, true, false);
inOrder.verify(srv).indexNodes(nodes, true);
inOrder.verify(srv).indexTransaction(tx, true);
inOrder.verify(srv).commit();
}