Mirror of https://github.com/Alfresco/SearchServices.git, synced 2025-09-10 14:11:25 +00:00
ACS-6195 Add transform_failed integration test (#1919)
committed by GitHub
parent 037dbec4ea
commit f9135204d2
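In outline, the new test drives the failed-transform path end to end: it primes the stubbed repository client so that one node's text-content fetch reports a failed transform, indexes that node alongside the existing ones, waits for a document carrying the transform_failed status, then clears the stub and waits for the status to disappear once the node is reindexed. The sketch below condenses that flow. It reuses only names that appear in the diff (getNode, getNodeMetaData, ancestors, list, indexTransaction, waitForDocCount, SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST); transformStatusQuery(...) is a hypothetical shorthand for the BooleanQuery the test builds explicitly further down.

    // Condensed sketch of the new test flow (illustration, not the literal test body)
    Node transformFailedNode = getNode(txn, acl, SolrApiNodeStatus.UPDATED);
    NodeMetaData transformFailedMetaData =
            getNodeMetaData(transformFailedNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), false);

    // Prime the stub: any text-content request for this node id now reports a failed transform.
    SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST.add(transformFailedNode.getId());

    indexTransaction(txn,
            list(errorNode, folderNode, fileNode, transformFailedNode),
            list(errorMetaData, folderMetaData, fileMetaData, transformFailedMetaData));

    // A document with the transform_failed content status should appear...
    waitForDocCount(transformStatusQuery("transform_failed"), 1, MAX_WAIT_TIME);  // transformStatusQuery: hypothetical helper

    // ...and once the stub stops failing, the node should be reindexed and the status cleared.
    SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST.clear();
    waitForDocCount(transformStatusQuery("transform_failed"), 0, MAX_WAIT_TIME);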
@@ -2,7 +2,7 @@
  * #%L
  * Alfresco Search Services
  * %%
- * Copyright (C) 2005 - 2020 Alfresco Software Limited
+ * Copyright (C) 2005 - 2023 Alfresco Software Limited
  * %%
  * This file is part of the Alfresco software.
  * If the software was purchased under a paid Alfresco license, the terms of
@@ -75,7 +75,6 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

@@ -2,7 +2,7 @@
  * #%L
  * Alfresco Search Services
  * %%
- * Copyright (C) 2005 - 2020 Alfresco Software Limited
+ * Copyright (C) 2005 - 2023 Alfresco Software Limited
  * %%
  * This file is part of the Alfresco software.
  * If the software was purchased under a paid Alfresco license, the terms of
@@ -41,7 +41,6 @@ import static org.alfresco.solr.AlfrescoSolrUtils.list;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.search.adaptor.QueryConstants;
@@ -51,8 +50,8 @@ import org.alfresco.solr.AbstractAlfrescoSolrIT;
import org.alfresco.solr.client.Acl;
import org.alfresco.solr.client.AclChangeSet;
import org.alfresco.solr.client.AclReaders;
import org.alfresco.solr.client.ContentPropertyValue;
import org.alfresco.solr.client.Node;
import org.alfresco.solr.client.Node.SolrApiNodeStatus;
import org.alfresco.solr.client.NodeMetaData;
import org.alfresco.solr.client.SOLRAPIQueueClient;
import org.alfresco.solr.client.StringPropertyValue;
@@ -98,6 +97,7 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
        SOLRAPIQueueClient.ACL_READERS_MAP.clear();
        SOLRAPIQueueClient.ACL_MAP.clear();
        SOLRAPIQueueClient.NODE_MAP.clear();
+       SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST.clear();
    }


@@ -144,20 +144,23 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
        //Next create two nodes to update for the transaction
        Node folderNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
        Node fileNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
+       Node transformFailedNode = getNode(txn, acl, SolrApiNodeStatus.UPDATED);
        Node errorNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
        logger.info("######### error node:"+errorNode.getId());

        //Next create the NodeMetaData for each node. TODO: Add more metadata
        NodeMetaData folderMetaData = getNodeMetaData(folderNode, txn, acl, "mike", null, false);
        NodeMetaData fileMetaData = getNodeMetaData(fileNode, txn, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
+       NodeMetaData transformFailedMetaData = getNodeMetaData(transformFailedNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), false);
        //The errorNodeMetaData will cause an exception.
        NodeMetaData errorMetaData = getNodeMetaData(errorNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), true);
+       SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST.add(transformFailedNode.getId());

        //Index the transaction, nodes, and nodeMetaDatas.
        //Note that the content is automatically created by the test framework.
        indexTransaction(txn,
-               list(errorNode, folderNode, fileNode),
-               list(errorMetaData, folderMetaData, fileMetaData));
+               list(errorNode, folderNode, fileNode, transformFailedNode),
+               list(errorMetaData, folderMetaData, fileMetaData, transformFailedMetaData));

        //Check for the TXN state stamp.
        logger.info("#################### Started Second Test ##############################");
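Note the ordering in the hunk above: the node id goes into SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST before indexTransaction publishes the transaction to the trackers. Assuming the trackers start polling the stubbed client as soon as the transaction is visible, priming the stub first guarantees that the very first content fetch for that node already reports the failure. A minimal sketch of the pattern:

    // Prime the stub first, then publish the work to the trackers.
    SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST.add(transformFailedNode.getId());
    indexTransaction(txn, nodes, nodeMetaDatas);  // illustrative argument names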
@@ -195,16 +198,24 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
        logger.info("#################### Passed Fourth Test ##############################");




        //Check for the error doc

        waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 1, MAX_WAIT_TIME);

        logger.info("#################### Passed Fifth Test ##############################");

+       builder = new BooleanQuery.Builder();
+       builder.add(new BooleanClause(new TermQuery(new Term("content@s__tr_status@{http://www.alfresco.org/model/content/1.0}content", "transform_failed")), BooleanClause.Occur.MUST));
+       waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);

+       SOLRAPIQueueClient.NODE_FAILED_TRANSFORM_LIST.clear();

+       //Check if the node with the transform_failed status has been reindexed
+       builder = new BooleanQuery.Builder();
+       builder.add(new BooleanClause(new TermQuery(new Term("content@s__tr_status@{http://www.alfresco.org/model/content/1.0}content", "transform_failed")), BooleanClause.Occur.MUST));
+       waitForDocCount(builder.build(), 0, MAX_WAIT_TIME);

+       logger.info("#################### Passed Sixth Test ##############################");

        //Mark the folder as needing cascade
        Transaction txn1 = getTransaction(0, 1);
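The field queried here, content@s__tr_status@{http://www.alfresco.org/model/content/1.0}content, appears to be the per-property dynamic field in which the information server records the transform status of cm:content (the content@s___t@... field used elsewhere in this test holds the transformed text); take that reading as an informed assumption rather than documented behaviour. Because the recovered node eventually contributes its own content document, every later assertion on documents containing "world" shifts up by one in this commit (2 to 3, 552 to 553, 1552 to 1553, 1001 to 1002). For reference, the same status check spelled out with the standard Lucene classes the test already uses:

    // Build the transform-status check explicitly (plain Lucene API).
    BooleanQuery.Builder statusQuery = new BooleanQuery.Builder();
    statusQuery.add(new BooleanClause(
            new TermQuery(new Term(
                    "content@s__tr_status@{http://www.alfresco.org/model/content/1.0}content",
                    "transform_failed")),
            BooleanClause.Occur.MUST));

    // Poll until exactly one matching document is visible; after the stub is cleared, expect zero.
    waitForDocCount(statusQuery.build(), 1, MAX_WAIT_TIME);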
@@ -228,7 +239,7 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
        builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn1.getId(), txn1.getId() + 1, true, false), BooleanClause.Occur.MUST));
        waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);

-       logger.info("#################### Passed Sixth Test ##############################");
+       logger.info("#################### Passed Seventh Test ##############################");


        TermQuery termQuery1 = new TermQuery(new Term(QueryConstants.FIELD_ANCESTOR, nodeRef.toString()));
@@ -246,13 +257,13 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
        assertQ(req, "*[count(//doc)=1]","//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");


-       logger.info("#################### Passed Seventh Test ##############################");
+       logger.info("#################### Passed Eighth Test ##############################");


        //Check that both documents have been indexed and have content.
-       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
+       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 3, MAX_WAIT_TIME);

-       logger.info("#################### Passed Eighth Test ##############################");
+       logger.info("#################### Passed Ninth Test ##############################");

        testIsContentIndexed(acl);

@@ -273,9 +284,9 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT

        logger.info("############################ Bulk Nodes:" + nodes.size());
        indexTransaction(txn2, nodes, nodeMetaDatas);
-       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 552, MAX_WAIT_TIME);
+       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 553, MAX_WAIT_TIME);

-       logger.info("#################### Passed Ninth Test ##############################");
+       logger.info("#################### Passed Tenth Test ##############################");

        for(int i=0; i<1000; i++)
        {
@@ -289,9 +300,9 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
            indexTransaction(txnX, nodesX, nodeMetaDatasX);
        }

-       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1552, MAX_WAIT_TIME);
+       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1553, MAX_WAIT_TIME);

-       logger.info("#################### Passed Tenth Test ##############################");
+       logger.info("#################### Passed Eleventh Test ##############################");


        //Test the maintenance methods
@@ -308,7 +319,7 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
        builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "amy")), BooleanClause.Occur.MUST));
        waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);

-       logger.info("#################### Passed Eleventh Test ##############################");
+       logger.info("#################### Passed Twelfth Test ##############################");


        // Wait for a document that has the new owner and the content populated.
@@ -317,7 +328,7 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
        builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "jill")), BooleanClause.Occur.MUST));
        waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);

-       logger.info("#################### Passed Twelth Test ##############################");
+       logger.info("#################### Passed Thirteenth Test ##############################");



@@ -399,7 +410,7 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT

        waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 0, MAX_WAIT_TIME); //paul should be purged
        waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 0, MAX_WAIT_TIME);
-       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1001, MAX_WAIT_TIME); // Refects the purged node and transaction
+       waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1002, MAX_WAIT_TIME); // Refects the purged node and transaction

        logger.info("#################### Passed Eighteenth Test ##############################");

@@ -424,7 +435,7 @@ public class AlfrescoSolrTrackerIT extends AbstractAlfrescoSolrIT
    private void testIsContentIndexed(Acl acl) throws Exception
    {
        // number of existing documents before running this test
-       final int previousNumberOfDocuments = 2;
+       final int previousNumberOfDocuments = 3;

        waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), previousNumberOfDocuments, MAX_WAIT_TIME);

@@ -2,7 +2,7 @@
  * #%L
  * Alfresco Search Services
  * %%
- * Copyright (C) 2005 - 2022 Alfresco Software Limited
+ * Copyright (C) 2005 - 2023 Alfresco Software Limited
  * %%
  * This file is part of the Alfresco software.
  * If the software was purchased under a paid Alfresco license, the terms of
@@ -36,7 +36,6 @@ import java.net.ConnectException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -66,6 +65,7 @@ public class SOLRAPIQueueClient extends SOLRAPIClient
    public final static Map<Long, List<Node>> NODE_MAP = Collections.synchronizedMap(new HashMap<>());
    public final static Map<Long, NodeMetaData> NODE_META_DATA_MAP = Collections.synchronizedMap(new HashMap<>());
    public final static Map<Long, Map<QName, String>> NODE_CONTENT_MAP = Collections.synchronizedMap(new HashMap<>());
+   public final static List<Long> NODE_FAILED_TRANSFORM_LIST = Collections.synchronizedList(new ArrayList<>());

    private static boolean throwException;

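A short note on the choice of Collections.synchronizedList for the new fixture: as with the surrounding synchronizedMap fields, each individual add, contains, and clear call is thread-safe when the test thread mutates the list while tracker threads read it; compound operations such as iteration would still need external synchronization, but getTextContent below only calls contains. A minimal, self-contained illustration of that property:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class SynchronizedListSketch
    {
        // Same construction as NODE_FAILED_TRANSFORM_LIST above.
        static final List<Long> FAILED_IDS = Collections.synchronizedList(new ArrayList<>());

        public static void main(String[] args) throws InterruptedException
        {
            // Writer (stands in for the test) primes and later clears the list.
            Thread writer = new Thread(() -> { FAILED_IDS.add(42L); FAILED_IDS.clear(); });
            // Reader (stands in for a tracker thread) can safely call contains() concurrently.
            Thread reader = new Thread(() -> System.out.println(FAILED_IDS.contains(42L)));
            writer.start(); reader.start();
            writer.join(); reader.join();
        }
    }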
@@ -362,6 +362,11 @@ public class SOLRAPIQueueClient extends SOLRAPIClient
            throw new ConnectException("THROWING EXCEPTION, better be ready!");
        }

+       if(NODE_FAILED_TRANSFORM_LIST.contains(nodeId))
+       {
+           return new GetTextContentResponse(new DummyTransformFailedResponse("transformFailed"));
+       }

        if(NODE_CONTENT_MAP.containsKey(nodeId))
        {
            return new GetTextContentResponse(new DummyResponse(NODE_CONTENT_MAP.get(nodeId).get(propertyQName)));
@@ -410,6 +415,50 @@ public class SOLRAPIQueueClient extends SOLRAPIClient
        }
    }

+   private static class DummyTransformFailedResponse implements Response
+   {
+       private final String text;
+
+       public DummyTransformFailedResponse(String text)
+       {
+           this.text = text;
+       }
+
+       @Override
+       public InputStream getContentAsStream()
+       {
+           return new ByteArrayInputStream(text.getBytes());
+       }
+
+       @Override
+       public int getStatus()
+       {
+           return HttpStatus.SC_NO_CONTENT;
+       }
+
+       @Override
+       public void release()
+       {
+
+       }
+
+       @Override
+       public String getHeader(String key)
+       {
+           if("X-Alfresco-transformStatus".equals(key))
+           {
+               return "transformFailed";
+           }
+           return null;
+       }
+
+       @Override
+       public String getContentType()
+       {
+           return "text/html";
+       }
+   }

    @Override
    public void close()
    {
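The stub's distinctive behaviour is the X-Alfresco-transformStatus header combined with the 204 (SC_NO_CONTENT) status; a caller wrapping it in GetTextContentResponse is expected to record the failure rather than index any text (how GetTextContentResponse interprets the header is not shown in this diff and is assumed). Below is an illustrative use of the Response methods implemented above; illustrative only, since the class is private to SOLRAPIQueueClient:

    // Exercise the dummy response through the Response interface it implements.
    Response response = new DummyTransformFailedResponse("transformFailed");

    String transformStatus = response.getHeader("X-Alfresco-transformStatus");  // "transformFailed"
    int status = response.getStatus();                                          // HttpStatus.SC_NO_CONTENT (204)

    if ("transformFailed".equals(transformStatus))
    {
        // A consumer would mark the node's content with a failed-transform status
        // (surfacing in the index as the transform_failed value the test queries)
        // instead of reading the body via response.getContentAsStream().
    }
    response.release();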