Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-09-17 14:21:39 +00:00)
Revert "Merged alfresco-repository feature/REPO-4334_metadata to new projects"
This reverts commit 90bf90b255
made to the wrong branch.
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2017 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -45,12 +45,9 @@ import org.junit.runners.Suite;
org.alfresco.repo.action.evaluator.HasAspectEvaluatorTest.class,
org.alfresco.repo.action.executer.SetPropertyValueActionExecuterTest.class,
org.alfresco.repo.action.executer.AddFeaturesActionExecuterTest.class,

org.alfresco.repo.action.executer.ContentMetadataExtracterTest.class,
org.alfresco.repo.action.executer.ContentMetadataExtracterTagMappingTest.class,
org.alfresco.repo.action.executer.ContentMetadataEmbedderTest.class,
org.alfresco.repo.action.executer.AsynchronousExtractorTest.class,

org.alfresco.repo.rule.RuleLinkTest.class,
org.alfresco.repo.rule.RuleServiceCoverageTest.class,
org.alfresco.repo.rule.RuleServiceImplTest.class,
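The hunk above edits a JUnit 4 test-suite class (the import org.junit.runners.Suite; context line gives this away): the revert changes which metadata test classes are listed. For orientation, a minimal sketch of that suite wiring, assuming JUnit 4; the class name here is illustrative, not the real suite class:

import org.junit.runner.RunWith;
import org.junit.runners.Suite;

// Hypothetical suite name; the real class edited above lists many more tests.
@RunWith(Suite.class)
@Suite.SuiteClasses({
    org.alfresco.repo.action.executer.ContentMetadataExtracterTest.class,
    org.alfresco.repo.action.executer.ContentMetadataEmbedderTest.class
})
public class ExampleMetadataActionTestSuite
{
}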
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -25,11 +25,22 @@
*/
package org.alfresco.repo.action.executer;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionImpl;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.metadata.TikaPoweredMetadataExtracter;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -44,25 +55,15 @@ import org.alfresco.service.namespace.QName;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
import org.apache.tika.embedder.Embedder;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.apache.tika.parser.ParseContext;
import org.junit.After;
import org.apache.tika.parser.Parser;
import org.junit.Before;
import org.junit.Test;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
* Test of the ActionExecuter for embedding metadata
*
@@ -93,9 +94,7 @@ public class ContentMetadataEmbedderTest extends BaseSpringTest
this.dictionaryService = (DictionaryService) this.applicationContext.getBean("dictionaryService");
this.mimetypeService = (MimetypeService) this.applicationContext.getBean("mimetypeService");
this.metadataExtracterRegistry = (MetadataExtracterRegistry) this.applicationContext.getBean("metadataExtracterRegistry");
metadataExtracterRegistry.setAsyncExtractEnabled(false);
metadataExtracterRegistry.setAsyncEmbedEnabled(false);

AuthenticationComponent authenticationComponent = (AuthenticationComponent)applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();

@@ -124,21 +123,15 @@ public class ContentMetadataEmbedderTest extends BaseSpringTest
this.executer.setApplicableTypes(new String[] { ContentModel.TYPE_CONTENT.toString() });
}

@After
public void after()
{
metadataExtracterRegistry.setAsyncExtractEnabled(true);
metadataExtracterRegistry.setAsyncEmbedEnabled(true);
}

/**
* Test that a failing embedder does not destroy the original content
*/
@Test
public void testFailingEmbedder()
{
AbstractMappingMetadataExtracter embedder = new FailingMappingMetadataEmbedder(Arrays.asList(MimetypeMap.MIMETYPE_PDF));
embedder.setRegistry(metadataExtracterRegistry);
MetadataExtracterRegistry registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
FailingEmbedder embedder = new FailingEmbedder(Arrays.asList(MimetypeMap.MIMETYPE_PDF));
embedder.setRegistry(registry);
embedder.setDictionaryService(this.dictionaryService);
embedder.setMimetypeService(this.mimetypeService);
embedder.register();
@@ -165,16 +158,17 @@ public class ContentMetadataEmbedderTest extends BaseSpringTest
}

/**
* Embedder which fails upon calling embed on its {@link FailingEmbedder}
* Tika-powered embedder which fails upon calling embed on its {@link FailingTikaEmbedder}
*/
private class FailingMappingMetadataEmbedder extends AbstractMappingMetadataExtracter
private class FailingEmbedder extends TikaPoweredMetadataExtracter
{

/**
* Constructor for setting supported extract and embed mimetypes
*
* @param mimetypes the supported extract and embed mimetypes
*/
public FailingMappingMetadataEmbedder(Collection<String> mimetypes)
public FailingEmbedder(Collection<String> mimetypes)
{
super(
new HashSet<String>(mimetypes),
@@ -182,26 +176,15 @@ public class ContentMetadataEmbedderTest extends BaseSpringTest
}

@Override
protected void embedInternal(Map<String, Serializable> metadata, ContentReader reader, ContentWriter writer) throws Throwable
protected Parser getParser()
{
Embedder embedder = getEmbedder();
if (embedder == null)
{
return;
}

Map<String, String> metadataAsStrings = convertMetadataToStrings(metadata);
Metadata metadataToEmbed = new Metadata();
metadataAsStrings.forEach((k,v)->metadataToEmbed.add(k, v));

InputStream inputStream = reader.getContentInputStream();
OutputStream outputStream = writer.getContentOutputStream();
embedder.embed(metadataToEmbed, null, outputStream, null);
return null;
}

@Override
protected Embedder getEmbedder()
{
return new FailingEmbedder();
return new FailingTikaEmbedder();
}

@Override
@@ -219,18 +202,12 @@ public class ContentMetadataEmbedderTest extends BaseSpringTest
mapping.put("author", qnames);
return mapping;
}

@Override
protected Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
return null;
}
}

/**
* Metadata embedder which fails on a call to embed.
* Tika metadata embedder which fails on a call to embed.
*/
private class FailingEmbedder implements Embedder
private class FailingTikaEmbedder implements Embedder
{
private static final long serialVersionUID = -4954679684941467571L;

@@ -242,7 +219,7 @@ public class ContentMetadataEmbedderTest extends BaseSpringTest

@Override
public void embed(Metadata metadata, InputStream originalStream, OutputStream outputStream, ParseContext context)
throws IOException
throws IOException, TikaException
{
throw new IOException("Forced failure");
}
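The failing embedder restored by the hunks above boils down to a Tika Embedder whose embed(...) always throws, so the test can assert the original content survives a failed embed. A self-contained sketch of that idea, assuming the Tika 1.x Embedder interface the diff itself imports; the class name is illustrative, not the one in the file:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Set;

import org.apache.tika.embedder.Embedder;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.apache.tika.parser.ParseContext;

// Embedder that simulates a broken embed so callers can verify the source content is untouched.
class AlwaysFailingEmbedder implements Embedder
{
    private static final long serialVersionUID = 1L;

    @Override
    public Set<MediaType> getSupportedEmbedTypes(ParseContext context)
    {
        // Claim PDF support so the test content is routed to this embedder.
        return Collections.singleton(MediaType.application("pdf"));
    }

    @Override
    public void embed(Metadata metadata, InputStream originalStream, OutputStream outputStream, ParseContext context)
            throws IOException, TikaException
    {
        throw new IOException("Forced failure"); // fail before writing anything
    }
}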
@@ -45,6 +45,7 @@ import org.alfresco.repo.action.AsynchronousActionExecutionQueuePolicies;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.metadata.TikaPoweredMetadataExtracter;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.policy.Behaviour.NotificationFrequency;
import org.alfresco.repo.policy.JavaBehaviour;
@@ -73,6 +74,8 @@ import org.alfresco.util.GUID;
import org.alfresco.util.testing.category.LuceneTests;
import org.alfresco.util.testing.category.RedundantTests;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.Parser;
import org.apache.tika.parser.jpeg.JpegParser;
import org.junit.experimental.categories.Category;
import org.springframework.context.ConfigurableApplicationContext;

@@ -109,7 +112,6 @@ public class ContentMetadataExtracterTagMappingTest extends TestCase
private TaggingService taggingService;
private NodeService nodeService;
private ContentService contentService;
private MetadataExtracterRegistry metadataExtracterRegistry;
private AuditService auditService;
private TransactionService transactionService;
private AuthenticationComponent authenticationComponent;
@@ -142,10 +144,7 @@ public class ContentMetadataExtracterTagMappingTest extends TestCase
this.taggingService = (TaggingService)ctx.getBean("TaggingService");
this.nodeService = (NodeService) ctx.getBean("NodeService");
this.contentService = (ContentService) ctx.getBean("ContentService");
this.metadataExtracterRegistry = (MetadataExtracterRegistry) ctx.getBean("metadataExtracterRegistry");
metadataExtracterRegistry.setAsyncExtractEnabled(false);
metadataExtracterRegistry.setAsyncEmbedEnabled(false);

this.transactionService = (TransactionService)ctx.getBean("transactionComponent");
this.auditService = (AuditService)ctx.getBean("auditService");
this.authenticationComponent = (AuthenticationComponent)ctx.getBean("authenticationComponent");
@@ -209,9 +208,6 @@ public class ContentMetadataExtracterTagMappingTest extends TestCase
@Override
protected void tearDown() throws Exception
{
metadataExtracterRegistry.setAsyncExtractEnabled(true);
metadataExtracterRegistry.setAsyncEmbedEnabled(true);

if (AlfrescoTransactionSupport.getTransactionReadState() != TxnReadState.TXN_NONE)
{
fail("Test is not transaction-safe. Fix up transaction handling and re-test.");
@@ -301,7 +297,7 @@ public class ContentMetadataExtracterTagMappingTest extends TestCase
});
}

private static class TagMappingMetadataExtracter extends AbstractMappingMetadataExtracter
private static class TagMappingMetadataExtracter extends TikaPoweredMetadataExtracter
{

private String existingTagNodeRef;
@@ -333,10 +329,16 @@ public class ContentMetadataExtracterTagMappingTest extends TestCase
return sourceMimetype.equals(MimetypeMap.MIMETYPE_IMAGE_JPEG);
}

@Override
protected Parser getParser()
{
return new JpegParser();
}

@SuppressWarnings("unchecked")
public Map<String, Serializable> extractRaw(ContentReader reader) throws Throwable
{
Map<String, Serializable> rawMap = newRawMap();
Map<String, Serializable> rawMap = super.extractRaw(reader);

// Add some test keywords to those actually extracted from the file including a nodeRef
List<String> keywords = new ArrayList<String>(Arrays.asList(
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -43,11 +43,16 @@
*/
package org.alfresco.repo.action.executer;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionImpl;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.AsynchronousExtractor;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
@@ -62,18 +67,11 @@ import org.alfresco.service.namespace.QName;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.transaction.annotation.Transactional;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

/**
* Test of the ActionExecuter for extracting metadata. Note: This test makes
* assumptions about the PDF test data for PdfBoxExtracter.
@@ -90,7 +88,6 @@ public class ContentMetadataExtracterTest extends BaseSpringTest

private NodeService nodeService;
private ContentService contentService;
private MetadataExtracterRegistry registry;
private StoreRef testStoreRef;
private NodeRef rootNodeRef;
private NodeRef nodeRef;
@@ -104,10 +101,7 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
{
this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
this.contentService = (ContentService) this.applicationContext.getBean("contentService");
registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
registry.setAsyncExtractEnabled(false);
registry.setAsyncEmbedEnabled(false);

AuthenticationComponent authenticationComponent = (AuthenticationComponent)applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();

@@ -132,13 +126,6 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
this.executer = (ContentMetadataExtracter) this.applicationContext.getBean("extract-metadata");
}

@After
public void after()
{
registry.setAsyncExtractEnabled(true);
registry.setAsyncEmbedEnabled(true);
}

/**
* Test execution of the extraction itself
*/
@@ -202,6 +189,7 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
@Test
public void testUnknownProperties()
{
MetadataExtracterRegistry registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
TestUnknownMetadataExtracter extracterUnknown = new TestUnknownMetadataExtracter();
extracterUnknown.setRegistry(registry);
extracterUnknown.register();
@@ -259,6 +247,7 @@ public class ContentMetadataExtracterTest extends BaseSpringTest
@Test
public void testNullExtractedValues_ALF1823()
{
MetadataExtracterRegistry registry = (MetadataExtracterRegistry) applicationContext.getBean("metadataExtracterRegistry");
TestNullPropMetadataExtracter extractor = new TestNullPropMetadataExtracter();
extractor.setRegistry(registry);
extractor.register();
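A pattern that recurs across the test hunks above: the reverted code switched the registry's asynchronous extract/embed flags off in setup and back on in teardown, so the legacy in-process extracters run synchronously during each test. A condensed sketch of that pattern, using the setters shown in the hunks; how the registry bean is obtained is left to the subclass, and the base-class name is illustrative:

import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.junit.After;
import org.junit.Before;

public abstract class SyncExtractionTestBase
{
    // Assumed to be looked up from the Spring application context by the concrete test.
    protected MetadataExtracterRegistry registry;

    @Before
    public void disableAsyncExtraction()
    {
        registry.setAsyncExtractEnabled(false); // force in-process extraction
        registry.setAsyncEmbedEnabled(false);
    }

    @After
    public void restoreAsyncExtraction()
    {
        registry.setAsyncExtractEnabled(true);  // restore the default asynchronous behaviour
        registry.setAsyncEmbedEnabled(true);
    }
}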
@@ -63,13 +63,11 @@ import org.junit.Ignore;
import org.springframework.context.ApplicationContext;

/**
* @deprecated The JodConverterMetadataExtracter has not been in use since 6.0.1 and the transformer is also deprecated.
*
*
* @author Neil McErlean
* @since 3.3
*/
@Ignore("This is an abstract class so don't instaniate it or run it in Junit")
@Deprecated
public abstract class AbstractJodConverterBasedTest
{
private static Log log = LogFactory.getLog(AbstractJodConverterBasedTest.class);
@@ -1,559 +0,0 @@
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.action.executer;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.action.ActionImpl;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter;
import org.alfresco.repo.content.metadata.AsynchronousExtractor;
import org.alfresco.repo.content.metadata.MetadataExtracterRegistry;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.repo.content.transform.TransformerDebug;
import org.alfresco.repo.content.transform.UnsupportedTransformationException;
import org.alfresco.repo.rendition2.RenditionDefinition2;
import org.alfresco.repo.rendition2.RenditionService2Impl;
import org.alfresco.repo.rendition2.TransformClient;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.MimetypeService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.tagging.TaggingService;
import org.alfresco.service.namespace.NamespacePrefixResolver;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.transform.client.registry.TransformServiceRegistry;
import org.alfresco.util.BaseSpringTest;
import org.alfresco.util.GUID;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.NotSupportedException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.StringJoiner;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static java.util.Arrays.asList;
import static org.alfresco.model.ContentModel.PROP_CONTENT;
import static org.alfresco.model.ContentModel.PROP_CREATED;
import static org.alfresco.model.ContentModel.PROP_CREATOR;
import static org.alfresco.model.ContentModel.PROP_MODIFIED;
import static org.alfresco.model.ContentModel.PROP_MODIFIER;
import static org.alfresco.repo.rendition2.RenditionService2Impl.SOURCE_HAS_NO_CONTENT;

/**
* Tests the asynchronous extract and embed of metadata. This is normally performed in a T-Engine, but in this test
* class is mocked using a separate Thread that returns well known values. What make the AsynchronousExtractor
* different from other {@link AbstractMappingMetadataExtracter} sub classes is that the calling Thread does not
* do the work of updating properties or the content, as the T-Engine will reply at some later point.
*
* @author adavis
*/
@Category(BaseSpringTestsCategory.class)
public class AsynchronousExtractorTest extends BaseSpringTest
{
private final static String ID = GUID.generate();
private static final String AFTER_CALLING_EXECUTE = "after calling execute";
private static final String AFTER_THE_TRANSFORM = "after the transform";
private static final Integer UNCHANGED_HASHCODE = null;
private static final Integer CHANGED_HASHCODE = 1234;
private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("EEE MMM dd HH:mm:ss Z yyyy");
private static final ExecutorService executorService = Executors.newCachedThreadPool();

private NodeService nodeService;
private ContentService contentService;
private DictionaryService dictionaryService;
private MimetypeService mimetypeService;
private MetadataExtracterRegistry metadataExtracterRegistry;
private StoreRef testStoreRef;
private NodeRef rootNodeRef;
private NodeRef nodeRef;
private AsynchronousExtractor asynchronousExtractor;
private NamespacePrefixResolver namespacePrefixResolver;
private TransformerDebug transformerDebug;
private TransactionService transactionService;
private TransformServiceRegistry transformServiceRegistry;
private TaggingService taggingService;
private ContentMetadataExtracter contentMetadataExtracter;
private ContentMetadataEmbedder contentMetadataEmbedder;
private RenditionService2Impl renditionService2;
private TransformClient transformClient;

private long origSize;
private Map<QName, Serializable> origProperties;
private Map<QName, Serializable> expectedProperties;
private Map<QName, Serializable> properties;

private class TestAsynchronousExtractor extends AsynchronousExtractor
{
private final String mockResult;
private final Integer changedHashcode;
private final Random random = new Random();

private boolean finished;

TransformClient mockTransformClient = new TransformClient()
{
@Override
public void checkSupported(NodeRef sourceNodeRef, RenditionDefinition2 renditionDefinition, String sourceMimetype, long sourceSizeInBytes, String contentUrl)
{
}

@Override
public void transform(NodeRef sourceNodeRef, RenditionDefinition2 renditionDefinition, String user, int sourceContentHashCode)
throws UnsupportedTransformationException, ContentIOException
{
mockTransform(sourceNodeRef, renditionDefinition, sourceContentHashCode);
}
};

/**
* Creates an AsynchronousExtractor that simulates a extract or embed.
*
* @param mockResult if specified indicates a value was returned. The result is read as a resource from
* the classpath.
* @param changedHashcode if specified indicates that the source node content changed or was deleted between
* the request to extract or embed and the response.
*/
TestAsynchronousExtractor(String mockResult, Integer changedHashcode)
{
this.mockResult = mockResult;
this.changedHashcode = changedHashcode;

setNodeService(nodeService);
setNamespacePrefixResolver(namespacePrefixResolver);
setTransformerDebug(transformerDebug);
setRenditionService2(renditionService2);
setContentService(contentService);
setTransactionService(transactionService);
setTransformServiceRegistry(transformServiceRegistry);
setTaggingService(taggingService);
setRegistry(metadataExtracterRegistry);
setMimetypeService(mimetypeService);
setDictionaryService(dictionaryService);
setExecutorService(executorService);
register();

renditionService2.setTransformClient(mockTransformClient);
}

@Override
public boolean isSupported(String sourceMimetype, long sourceSizeInBytes)
{
return true;
}

@Override
public boolean isEmbedderSupported(String sourceMimetype, long sourceSizeInBytes)
{
return true;
}

private void mockTransform(NodeRef sourceNodeRef, RenditionDefinition2 renditionDefinition, int sourceContentHashCode)
{
try
{
transformerDebug.pushMisc();
wait(50, 700);
}
finally
{
transformerDebug.popMisc();
}

int transformContentHashCode = changedHashcode == null ? sourceContentHashCode : changedHashcode;
if (mockResult != null)
{
try (InputStream transformInputStream = getClass().getClassLoader().getResourceAsStream(mockResult))
{
renditionService2.consume(sourceNodeRef, transformInputStream, renditionDefinition, transformContentHashCode);
}
catch (IOException e)
{
throw new RuntimeException("Could not read '" + mockResult + "' from the classpath.", e);
}
}
else
{
renditionService2.failure(sourceNodeRef, renditionDefinition, transformContentHashCode);
}

synchronized (this)
{
finished = true;
notifyAll();
}
}

/**
* Wait for a few milliseconds or until the finished flag is set.
*
* @param from inclusive lower bound. If negative, there is only an upper bound.
* @param to exclusive upper bound.
* @return the wait.
*/
public synchronized void wait(int from, int to)
{
long start = System.currentTimeMillis();
long end = start + (from < 0 ? to : from + random.nextInt(to - from));

while (!finished && System.currentTimeMillis() < end)
{
try
{
long ms = end - System.currentTimeMillis();
if (ms > 0)
{
wait(ms);
}
}
catch (InterruptedException ignore)
{
}
}
}
}

@Before
public void before() throws Exception
{
nodeService = (NodeService) applicationContext.getBean("nodeService");
contentService = (ContentService) applicationContext.getBean("contentService");
dictionaryService = (DictionaryService) applicationContext.getBean("dictionaryService");
mimetypeService = (MimetypeService) applicationContext.getBean("mimetypeService");
namespacePrefixResolver = (NamespacePrefixResolver) applicationContext.getBean("namespaceService");
transformerDebug = (TransformerDebug) applicationContext.getBean("transformerDebug");
renditionService2 = (RenditionService2Impl) applicationContext.getBean("renditionService2");
transactionService = (TransactionService) applicationContext.getBean("transactionService");
transformServiceRegistry = (TransformServiceRegistry) applicationContext.getBean("transformServiceRegistry");
taggingService = (TaggingService) applicationContext.getBean("taggingService");
transformClient = (TransformClient) applicationContext.getBean("transformClient");

// Create an empty metadata extractor registry, so that if we add one it will be used
metadataExtracterRegistry = new MetadataExtracterRegistry();

contentMetadataExtracter = new ContentMetadataExtracter();
contentMetadataExtracter.setNodeService(nodeService);
contentMetadataExtracter.setContentService(contentService);
contentMetadataExtracter.setDictionaryService(dictionaryService);
contentMetadataExtracter.setMetadataExtracterRegistry(metadataExtracterRegistry);
contentMetadataExtracter.setApplicableTypes(new String[]{ContentModel.TYPE_CONTENT.toString()});
contentMetadataExtracter.setCarryAspectProperties(true);

contentMetadataEmbedder = new ContentMetadataEmbedder();
contentMetadataEmbedder.setNodeService(nodeService);
contentMetadataEmbedder.setContentService(contentService);
contentMetadataEmbedder.setMetadataExtracterRegistry(metadataExtracterRegistry);
contentMetadataEmbedder.setApplicableTypes(new String[]{ContentModel.TYPE_CONTENT.toString()});

transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback<Void>()
{
@Override
public Void execute() throws Throwable
{
AuthenticationComponent authenticationComponent = (AuthenticationComponent) applicationContext.getBean("authenticationComponent");
authenticationComponent.setSystemUserAsCurrentUser();

// Create the store and get the root node
testStoreRef = nodeService.createStore(
StoreRef.PROTOCOL_WORKSPACE,
"Test_" + System.currentTimeMillis());
rootNodeRef = nodeService.getRootNode(testStoreRef);

// Create the node used for tests
nodeRef = nodeService.createNode(
rootNodeRef, ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}testnode"),
ContentModel.TYPE_CONTENT).getChildRef();

// Authenticate as the system user
authenticationComponent.setSystemUserAsCurrentUser();

ContentWriter cw = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
cw.setMimetype(MimetypeMap.MIMETYPE_PDF);
cw.putContent(AbstractContentTransformerTest.loadQuickTestFile("pdf"));

origProperties = nodeService.getProperties(nodeRef);
nodeService.setProperties(nodeRef, origProperties);
origProperties = new HashMap<>(origProperties); // just in case the contents changed.
expectedProperties = new HashMap<>(origProperties); // ready to be modified.

origSize = getSize(nodeRef);

return null;
}
});
}

@After
public void after() throws Exception
{
renditionService2.setTransformClient(transformClient);
}

private void assertAsyncMetadataExecute(ActionExecuterAbstractBase executor, String mockResult,
Integer changedHashcode, long expectedSize,
Map<QName, Serializable> expectedProperties,
QName... ignoreProperties) throws Exception
{
TestAsynchronousExtractor extractor = new TestAsynchronousExtractor(mockResult, changedHashcode);

executeAction(executor, extractor);
assertContentSize(nodeRef, origSize, AFTER_CALLING_EXECUTE);
assertProperties(nodeRef, origProperties, AFTER_CALLING_EXECUTE, ignoreProperties);

extractor.wait(-1, 10000);
assertContentSize(nodeRef, expectedSize, AFTER_THE_TRANSFORM);
assertProperties(nodeRef, expectedProperties, AFTER_THE_TRANSFORM, ignoreProperties);
}

private void executeAction(ActionExecuterAbstractBase extractor, TestAsynchronousExtractor asynchronousExtractor)
throws SystemException, NotSupportedException, HeuristicRollbackException, HeuristicMixedException, RollbackException
{
UserTransaction txn = transactionService.getUserTransaction();
txn.begin();
ActionImpl action = new ActionImpl(null, ID, SetPropertyValueActionExecuter.NAME, null);
extractor.execute(action, nodeRef);
txn.commit();
}

void assertContentSize(NodeRef nodeRef, long expectSize, String state)
{
long size = getSize(nodeRef);
if (expectSize == origSize)
{
assertEquals("The content should remain unchanged " + state, origSize, size);
}
else
{
assertEquals("The content should have changed " + state, expectSize, size);
}
}

private long getSize(NodeRef nodeRef)
{
ContentReader reader = contentService.getReader(nodeRef, ContentModel.PROP_CONTENT);
return reader.getSize();
}

private void assertProperties(NodeRef nodeRef, Map<QName, Serializable> expectProperties, String state,
QName[] ignoreProperties)
{
properties = nodeService.getProperties(nodeRef);

// Work out the difference in a human readable form and ignore the 5 system set properties (as they always
// change) plus any the caller has requested.
StringJoiner sj = new StringJoiner("\n");
List<QName> ignoreKeys = new ArrayList<>(asList(PROP_MODIFIED, PROP_MODIFIER, PROP_CONTENT, PROP_CREATED, PROP_CREATOR));
ignoreKeys.addAll(asList(ignoreProperties));
for (Map.Entry<QName, Serializable> entry : expectProperties.entrySet())
{
QName k = entry.getKey();
Serializable v = entry.getValue();
Serializable actual = properties.get(k);
if (!ignoreKeys.contains(k) && !v.equals(actual))
{
sj.add(k + "\n Expected: " + v + "\n Was: " + actual);
}
}
for (QName k : properties.keySet())
{
Serializable actual = properties.get(k);
if (!ignoreKeys.contains(k) && !expectProperties.containsKey(k))
{
sj.add(k + "\n Expected: null\n Was: " + actual);
}
}

if (sj.length() != 0)
{
if (expectProperties.equals(origProperties))
{
fail("The properties should remain unchanged " + state + "\n" + sj);
}
else
{
fail("The properties should have changed " + state + "\n" + sj);
}
}
}

@Test
public void testExtractHtml() throws Exception
{
expectedProperties.put(QName.createQName("cm:author", namespacePrefixResolver), "Nevin Nollop");
expectedProperties.put(QName.createQName("cm:description", namespacePrefixResolver), "Gym class featuring a brown fox and lazy dog");
expectedProperties.put(QName.createQName("cm:title", namespacePrefixResolver), "The quick brown fox jumps over the lazy dog");

assertAsyncMetadataExecute(contentMetadataExtracter, "quick/quick.html_metadata.json",
UNCHANGED_HASHCODE, origSize, expectedProperties);
}

@Test
public void testExtractNodeDeleted() throws Exception
{
assertAsyncMetadataExecute(contentMetadataExtracter, "quick/quick.html_metadata.json",
SOURCE_HAS_NO_CONTENT, origSize, origProperties);
}

@Test
public void testExtractContentChanged() throws Exception
{
assertAsyncMetadataExecute(contentMetadataExtracter, "quick/quick.html_metadata.json",
1234, origSize, origProperties);
}

@Test
public void testExtractTransformFailure() throws Exception
{
assertAsyncMetadataExecute(contentMetadataExtracter, null,
UNCHANGED_HASHCODE, origSize, origProperties);
}

@Test
public void testExtractTransformCorrupt() throws Exception
{
assertAsyncMetadataExecute(contentMetadataExtracter, "quick.html", // not json
UNCHANGED_HASHCODE, origSize, origProperties);
}

@Test
public void testUnknownNamespaceInResponse() throws Exception
{
// "sys:overwritePolicy": "PRAGMATIC" - is used
// "{http://www.unknown}name": "ignored" - is reported in an ERROR log
expectedProperties.put(QName.createQName("cm:author", namespacePrefixResolver), "Used");
assertAsyncMetadataExecute(contentMetadataExtracter, "quick/unknown_namespace_metadata.json",
UNCHANGED_HASHCODE, origSize, expectedProperties);
}

@Test
public void testExtractMsg() throws Exception // has dates as RFC822
{
expectedProperties.put(QName.createQName("cm:addressee", namespacePrefixResolver), "mark.rogers@alfresco.com");
expectedProperties.put(QName.createQName("cm:description", namespacePrefixResolver), "This is a quick test");
expectedProperties.put(QName.createQName("cm:addressees", namespacePrefixResolver),
new ArrayList<>(asList("mark.rogers@alfresco.com", "speedy@quick.com", "mrquick@nowhere.com")));

expectedProperties.put(QName.createQName("cm:sentdate", namespacePrefixResolver), SIMPLE_DATE_FORMAT.parse("Fri Jan 18 13:44:20 GMT 2013")); // 2013-01-18T13:44:20Z
expectedProperties.put(QName.createQName("cm:subjectline", namespacePrefixResolver), "This is a quick test");
expectedProperties.put(QName.createQName("cm:author", namespacePrefixResolver), "Mark Rogers");
expectedProperties.put(QName.createQName("cm:originator", namespacePrefixResolver), "Mark Rogers");

assertAsyncMetadataExecute(contentMetadataExtracter, "quick/quick.msg_metadata.json",
UNCHANGED_HASHCODE, origSize, expectedProperties);

Serializable sentDate = properties.get(QName.createQName("cm:sentdate", namespacePrefixResolver));
}

@Test
public void testExtractEml() throws Exception // has dates as longs since 1970
{
expectedProperties.put(QName.createQName("cm:addressee", namespacePrefixResolver), "Nevin Nollop <nevin.nollop@gmail.com>");
expectedProperties.put(QName.createQName("cm:description", namespacePrefixResolver), "The quick brown fox jumps over the lazy dog");
expectedProperties.put(QName.createQName("cm:addressees", namespacePrefixResolver),
new ArrayList<>(asList("Nevin Nollop <nevinn@alfresco.com>")));
expectedProperties.put(QName.createQName("imap:dateSent", namespacePrefixResolver), SIMPLE_DATE_FORMAT.parse("Fri Jun 04 13:23:22 BST 2004"));
expectedProperties.put(QName.createQName("imap:messageTo", namespacePrefixResolver), "Nevin Nollop <nevin.nollop@gmail.com>");
expectedProperties.put(QName.createQName("imap:messageId", namespacePrefixResolver), "<20040604122322.GV1905@phoenix.home>");
expectedProperties.put(QName.createQName("cm:title", namespacePrefixResolver), "The quick brown fox jumps over the lazy dog");
expectedProperties.put(QName.createQName("imap:messageSubject", namespacePrefixResolver), "The quick brown fox jumps over the lazy dog");
expectedProperties.put(QName.createQName("imap:messageCc", namespacePrefixResolver), "Nevin Nollop <nevinn@alfresco.com>");
expectedProperties.put(QName.createQName("cm:sentdate", namespacePrefixResolver), SIMPLE_DATE_FORMAT.parse("Fri Jun 04 13:23:22 BST 2004"));
expectedProperties.put(QName.createQName("cm:subjectline", namespacePrefixResolver), "The quick brown fox jumps over the lazy dog");
expectedProperties.put(QName.createQName("imap:messageFrom", namespacePrefixResolver), "Nevin Nollop <nevin.nollop@alfresco.com>");
expectedProperties.put(QName.createQName("cm:originator", namespacePrefixResolver), "Nevin Nollop <nevin.nollop@alfresco.com>");

// Note: As the metadata is for eml, an aspect gets added resulting in a second extract because of
// ImapContentPolicy.onAddAspect. I cannot see a good way to avoid this.
assertAsyncMetadataExecute(contentMetadataExtracter, "quick/quick.eml_metadata.json",
UNCHANGED_HASHCODE, origSize, expectedProperties,
// cm:author is not in the quick.eml_metadata.json but is being added by the second extract which thinks
// the source mimetype is MimetypeMap.MIMETYPE_PDF, because that is what the before() method sets the
// content to. As a result the PdfBox metadata extractor is called, which extracts cm:author. Given that
// we don't know when this will take place, we simply ignore this property. We could fix this up, but it
// does not add anything to the test.
QName.createQName("cm:author", namespacePrefixResolver));
}

@Test
public void testEmbed() throws Exception
{
assertAsyncMetadataExecute(contentMetadataEmbedder, "quick/quick.html", // just replace the pdf with html!
UNCHANGED_HASHCODE, 428, expectedProperties);
}
@Test
public void testEmbedNodeDeleted() throws Exception
{
assertAsyncMetadataExecute(contentMetadataEmbedder, "quick/quick.html",
SOURCE_HAS_NO_CONTENT, origSize, origProperties);
}

@Test
public void testEmbedContentChanged() throws Exception
{
assertAsyncMetadataExecute(contentMetadataEmbedder, "quick/quick.html",
1234, origSize, origProperties);
}

@Test
public void testEmbedTransformFailure() throws Exception
{
assertAsyncMetadataExecute(contentMetadataEmbedder, null,
UNCHANGED_HASHCODE, origSize, origProperties);
}

// TODO Write tests for: overwritePolicy, enableStringTagging and carryAspectProperties.
// Values are set in AsynchronousExtractor.setMetadata(...) but make use of original code within
// MetadataExtracter and AbstractMappingMetadataExtracter.
// As the tests for exiting extractors are to be removed in ACS 7.0, it is possible that they were being used
// to test these values.
}
@@ -177,5 +177,5 @@ public class DWGMetadataExtracterTest extends AbstractMetadataExtracterTest

assertEquals("Custom DWG property not found", "valueforcustomprop1", properties.get(TIKA_CUSTOM_TEST_PROPERTY));
}

}
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -102,8 +102,6 @@ import com.sun.mail.imap.protocol.RFC822DATA;
import com.sun.mail.imap.protocol.UID;
import com.sun.mail.util.ASCIIUtility;

import static org.alfresco.model.ContentModel.PROP_MODIFIED;

@Category({OwnJVMTestsCategory.class, LuceneTests.class})
public class ImapMessageTest extends TestCase
{
@@ -525,30 +523,15 @@ public class ImapMessageTest extends TestCase
messageHelper.addCc(address);

// Creating the message node in the repository
UserTransaction txn = transactionService.getUserTransaction();
txn.begin();
String name = AlfrescoImapConst.MESSAGE_PREFIX + GUID.generate();
FileInfo messageFile = fileFolderService.create(testImapFolderNodeRef, name, ContentModel.TYPE_CONTENT);
// Writing a content.
NodeRef nodeRef = messageFile.getNodeRef();
Serializable origModified = getModified(nodeRef);
new IncomingImapMessage(messageFile, serviceRegistry, message);
txn.commit();

// Calls to new IncomingImapMessage(...) only takes place when a new nodeRef is being created.
// No other code will be changing the nodeRef. An ImapModel.ASPECT_IMAP_CONTENT is added, which
// triggers a metadata extract to take place in a post commit method. Previously this would have been a
// synchronous process. This is no longer true as it may now take place in a T-Engine. So, we need to wait
// for the extract to take place. There does not
long end = System.currentTimeMillis()+10000;
while (System.currentTimeMillis() <= end && origModified.equals(getModified(nodeRef)))
{
Thread.currentThread().sleep(1000);
}

// Getting the transformed properties from the repository
// cm:originator, cm:addressee, cm:addressees, imap:messageFrom, imap:messageTo, imap:messageCc
Map<QName, Serializable> properties = nodeService.getProperties(nodeRef);
Map<QName, Serializable> properties = nodeService.getProperties(messageFile.getNodeRef());

String cmOriginator = (String) properties.get(ContentModel.PROP_ORIGINATOR);
String cmAddressee = (String) properties.get(ContentModel.PROP_ADDRESSEE);
@SuppressWarnings("unchecked")
@@ -572,12 +555,6 @@ public class ImapMessageTest extends TestCase
assertEquals(decodedAddress, imapMessageCc);
}

private Serializable getModified(NodeRef nodeRef)
{
Map<QName, Serializable> origProperties = nodeService.getProperties(nodeRef);
return origProperties.get(PROP_MODIFIED);
}

@Category(RedundantTests.class)
public void testEightBitMessage() throws Exception
{
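The comment in the hunk above explains why the reverted test had to poll: the metadata extract may now run out of process in a T-Engine, so the test loops until cm:modified changes or a timeout expires. A generic, self-contained sketch of that polling idea (helper name and signature are illustrative, not part of the repository code):

import java.util.function.Supplier;

// Wait until a condition becomes true or the timeout expires, checking once a second,
// mirroring the 10s/1s loop shown in the hunk above.
final class PollingWait
{
    static boolean waitUntil(Supplier<Boolean> condition, long timeoutMs) throws InterruptedException
    {
        long end = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() <= end)
        {
            if (condition.get())
            {
                return true;
            }
            Thread.sleep(1000);
        }
        return condition.get();
    }
}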
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2005 - 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -27,7 +27,6 @@ package org.alfresco.repo.rendition2;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.metadata.AsynchronousExtractor;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.rendition.RenditionPreventionRegistry;
@@ -47,9 +46,11 @@ import org.junit.runner.RunWith;
import org.mockito.Mock;

import org.mockito.junit.MockitoJUnitRunner;
import org.quartz.CronExpression;

import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@@ -60,6 +61,7 @@ import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.*;

/**
@@ -88,7 +90,6 @@ public class RenditionService2Test
@Mock private RuleService ruleService;
@Mock private TransformServiceRegistryImpl transformServiceRegistry;
@Mock private TransformReplyProvider transformReplyProvider;
@Mock private AsynchronousExtractor asynchronousExtractor;

private NodeRef nodeRef = new NodeRef("workspace://spacesStore/test-id");
private NodeRef nodeRefMissing = new NodeRef("workspace://spacesStore/bad-test-id");
@@ -153,7 +154,6 @@ public class RenditionService2Test
renditionService2.setTransformReplyProvider(transformReplyProvider);
renditionService2.setEnabled(true);
renditionService2.setThumbnailsEnabled(true);
renditionService2.setAsynchronousExtractor(asynchronousExtractor);

renditionDefinitionRegistry2.setRenditionConfigDir("alfresco/renditions/test");
renditionDefinitionRegistry2.afterPropertiesSet();
@@ -2,7 +2,7 @@
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2020 Alfresco Software Limited
* Copyright (C) 2019 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
@@ -242,13 +242,10 @@ public class LocalTransformServiceRegistryConfigTest extends TransformServiceReg
targetMimetype.add("image/gif");
targetMimetype.add("image/tiff");
imagemagickSupportedTransformation.put("image/tiff", targetMimetype);
targetMimetype = new ArrayList<>(targetMimetype);
targetMimetype.add("image/png");
targetMimetype.add("image/jpeg");
imagemagickSupportedTransformation.put("image/gif", targetMimetype);
imagemagickSupportedTransformation.put("image/jpeg", targetMimetype);
targetMimetype = new ArrayList<>(targetMimetype);
targetMimetype.add("alfresco-metadata-extract"); // Metadata extract and embed types should be excluded from pipeline cartesian products
imagemagickSupportedTransformation.put("image/png", targetMimetype);
targetMimetype = new ArrayList<>();
targetMimetype.add("target1");
@@ -336,7 +333,8 @@ public class LocalTransformServiceRegistryConfigTest extends TransformServiceReg
@Override
public void testJsonConfig() throws IOException
{
internalTestJsonConfig(64, 70);
// Not 60, 60 as we have added source->target1..3 to three transformers
internalTestJsonConfig(63, 69);
}

@Test
@@ -370,7 +368,7 @@ public class LocalTransformServiceRegistryConfigTest extends TransformServiceReg
switch (t.transformer.getTransformerName())
{
case "imagemagick":
assertEquals(t.transformer.getTransformerName() + " incorrect number of supported transform", 18, t.transformer.getSupportedSourceAndTargetList().size());
assertEquals(t.transformer.getTransformerName() + " incorrect number of supported transform", 17, t.transformer.getSupportedSourceAndTargetList().size());
assertEquals( t.transformer.getTransformerName() + "incorrect number of transform option names", 1, t.transformer.getTransformOptions().size());
assertEquals( t.transformer.getTransformerName() + "incorrect number of transform options", 6, countTopLevelOptions(t.transformer.getTransformOptions()));
assertEquals(t.transformer.getTransformerName() + " expected to not be a transformer pipeline", t.transformer.getTransformerPipeline().size(), 0);
@@ -430,7 +428,6 @@ public class LocalTransformServiceRegistryConfigTest extends TransformServiceReg
break;

case "officeToImageViaPdf":
// Note we will get 35 entries in getSupportedSourceAndTargetList() if the metadata transforms are not excluded
assertEquals(t.transformer.getTransformerName() + " incorrect number of supported transform", 28, t.transformer.getSupportedSourceAndTargetList().size());
assertEquals( t.transformer.getTransformerName() + "incorrect number of transform option names", 2, t.transformer.getTransformOptions().size());
assertEquals( t.transformer.getTransformerName() + "incorrect number of transform options", 11, countTopLevelOptions(t.transformer.getTransformOptions()));