mirror of https://github.com/Alfresco/alfresco-transform-core.git
REPO-4334 Move metadata extraction into T-Engines (#247)
* Metadata extract code added to T-Engines
* Required a refactor of duplicate code to avoid 3x more duplication:
  - try/catches used to return exit codes
  - calls to Java libraries or commands to external processes
  - building of transform options in controllers and adaptors
* Integration tests based on current extracts performed in the repo
* Included extract code for LibreOffice, and embed code even though it is no longer used out of the box. There may well be custom extracts using them that move to T-Engines
* Removal of unused imports
* Minor autoOrient / allowEnlargement bug fixes that were not included in Paddington on the T-Engine side
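For context, under this change the content repository asks a T-Engine for metadata by posting the source file to the engine's /transform endpoint with the pseudo mimetype alfresco-metadata-extract as the target, as added to AbstractTransformerController below. A minimal Spring RestTemplate sketch of such a request, assuming an engine listening on localhost:8090 (the URL used by the integration tests in this commit); the file name source.pdf is a placeholder:

import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class MetadataExtractRequestSketch
{
    public static void main(String[] args)
    {
        // Multipart form matching the /transform endpoint parameters added in this commit.
        MultiValueMap<String, Object> form = new LinkedMultiValueMap<>();
        form.add("file", new FileSystemResource("source.pdf"));   // placeholder source file
        form.add("sourceMimetype", "application/pdf");
        form.add("targetMimetype", "alfresco-metadata-extract");  // ask for metadata, not a rendition
        form.add("targetExtension", "json");

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);

        ResponseEntity<String> response = new RestTemplate().postForEntity(
            "http://localhost:8090/transform", new HttpEntity<>(form, headers), String.class);
        System.out.println(response.getBody());
    }
}

The response body is the JSON map of extracted properties that the repository then applies to the source node.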
@@ -2,7 +2,7 @@
|
||||
* #%L
|
||||
* Alfresco Transform Core
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2019 Alfresco Software Limited
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* -
|
||||
@@ -26,25 +26,6 @@
|
||||
*/
|
||||
package org.alfresco.transformer;
|
||||
|
||||
import static java.util.stream.Collectors.joining;
|
||||
import static org.alfresco.transformer.fs.FileManager.TempFileProvider.createTempFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.buildFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.createTargetFileName;
|
||||
import static org.alfresco.transformer.fs.FileManager.deleteFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.getFilenameFromContentDisposition;
|
||||
import static org.alfresco.transformer.fs.FileManager.save;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.SOURCE_ENCODING;
|
||||
import static org.springframework.http.HttpStatus.BAD_REQUEST;
|
||||
import static org.springframework.http.HttpStatus.CREATED;
|
||||
import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
|
||||
import static org.springframework.http.HttpStatus.OK;
|
||||
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
|
||||
import static org.springframework.util.StringUtils.getFilenameExtension;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.transform.client.model.TransformReply;
|
||||
import org.alfresco.transform.client.model.TransformRequest;
|
||||
import org.alfresco.transform.client.model.TransformRequestValidator;
|
||||
@@ -70,6 +51,40 @@ import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.ResponseBody;
|
||||
import org.springframework.web.client.HttpClientErrorException;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.io.File;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.stream.Collectors.joining;
|
||||
import static org.alfresco.transformer.fs.FileManager.TempFileProvider.createTempFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.buildFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.createAttachment;
|
||||
import static org.alfresco.transformer.fs.FileManager.createSourceFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.createTargetFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.createTargetFileName;
|
||||
import static org.alfresco.transformer.fs.FileManager.deleteFile;
|
||||
import static org.alfresco.transformer.fs.FileManager.getFilenameFromContentDisposition;
|
||||
import static org.alfresco.transformer.fs.FileManager.save;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.FILE;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.SOURCE_ENCODING;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.SOURCE_EXTENSION;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.SOURCE_MIMETYPE;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.TARGET_EXTENSION;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.TARGET_MIMETYPE;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.TEST_DELAY;
|
||||
import static org.alfresco.transformer.util.RequestParamMap.TRANSFORM_NAME_PROPERTY;
|
||||
import static org.springframework.http.HttpStatus.BAD_REQUEST;
|
||||
import static org.springframework.http.HttpStatus.CREATED;
|
||||
import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
|
||||
import static org.springframework.http.HttpStatus.OK;
|
||||
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
|
||||
import static org.springframework.http.MediaType.MULTIPART_FORM_DATA_VALUE;
|
||||
import static org.springframework.util.StringUtils.getFilenameExtension;
|
||||
|
||||
/**
|
||||
* <p>Abstract Controller, provides structure and helper methods to sub-class transformer controllers.</p>
|
||||
@@ -104,6 +119,10 @@ public abstract class AbstractTransformerController implements TransformControll
|
||||
private static final Logger logger = LoggerFactory.getLogger(
|
||||
AbstractTransformerController.class);
|
||||
|
||||
// Request parameters that are not part of transform options
|
||||
public static final List<String> NON_TRANSFORM_OPTION_REQUEST_PARAMETERS = Arrays.asList(SOURCE_EXTENSION,
|
||||
TARGET_EXTENSION, TARGET_MIMETYPE, SOURCE_MIMETYPE, TEST_DELAY, TRANSFORM_NAME_PROPERTY);
|
||||
|
||||
@Autowired
|
||||
private AlfrescoSharedFileStoreClient alfrescoSharedFileStoreClient;
|
||||
|
||||
@@ -122,11 +141,56 @@ public abstract class AbstractTransformerController implements TransformControll
|
||||
return new ResponseEntity<>(transformConfig, OK);
|
||||
}
|
||||
|
||||
@PostMapping(value = "/transform", consumes = MULTIPART_FORM_DATA_VALUE)
|
||||
public ResponseEntity<Resource> transform(HttpServletRequest request,
|
||||
@RequestParam(FILE) MultipartFile sourceMultipartFile,
|
||||
@RequestParam(TARGET_EXTENSION) String targetExtension,
|
||||
@RequestParam(value = SOURCE_MIMETYPE, required = false) String sourceMimetype,
|
||||
@RequestParam(value = TARGET_MIMETYPE, required = false) String targetMimetype,
|
||||
@RequestParam Map<String, String> requestParameters,
|
||||
@RequestParam (value = TEST_DELAY, required = false) Long testDelay,
|
||||
|
||||
// The TRANSFORM_NAME_PROPERTY param allows ACS legacy transformers to specify which transform to use.
// It can be removed once legacy transformers are removed from ACS.
|
||||
@RequestParam (value = TRANSFORM_NAME_PROPERTY, required = false) String requestTransformName)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Processing request via HTTP endpoint. Params: sourceMimetype: '{}', targetMimetype: '{}', "
|
||||
+ "targetExtension: '{}', requestParameters: {}", sourceMimetype, targetMimetype, targetExtension, requestParameters);
|
||||
}
|
||||
|
||||
final String targetFilename = createTargetFileName(
|
||||
sourceMultipartFile.getOriginalFilename(), targetExtension);
|
||||
getProbeTestTransform().incrementTransformerCount();
|
||||
final File sourceFile = createSourceFile(request, sourceMultipartFile);
|
||||
final File targetFile = createTargetFile(request, targetFilename);
|
||||
|
||||
Map<String, String> transformOptions = getTransformOptions(requestParameters);
|
||||
String transformName = getTransformerName(sourceMimetype, targetMimetype, requestTransformName, sourceFile, transformOptions);
|
||||
transform(transformName, sourceMimetype, targetMimetype, transformOptions, sourceFile, targetFile);
|
||||
|
||||
final ResponseEntity<Resource> body = createAttachment(targetFilename, targetFile);
|
||||
LogEntry.setTargetSize(targetFile.length());
|
||||
long time = LogEntry.setStatusCodeAndMessage(OK.value(), "Success");
|
||||
time += LogEntry.addDelay(testDelay);
|
||||
getProbeTestTransform().recordTransformTime(time);
|
||||
return body;
|
||||
}
|
||||
|
||||
protected Map<String, String> getTransformOptions(Map<String, String> requestParameters)
|
||||
{
|
||||
Map<String, String> transformOptions = new HashMap<>(requestParameters);
|
||||
transformOptions.keySet().removeAll(NON_TRANSFORM_OPTION_REQUEST_PARAMETERS);
|
||||
transformOptions.values().removeIf(v -> v.isEmpty());
|
||||
return transformOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* '/transform' endpoint which consumes and produces 'application/json'
|
||||
*
|
||||
* This is the way to tell Spring to redirect the request to this endpoint
|
||||
* instead of the older one, which produces 'html'
|
||||
* instead of the one which produces 'html'
|
||||
*
|
||||
* @param request The transformation request
|
||||
* @param timeout Transformation timeout
|
||||
@@ -339,6 +403,38 @@ public abstract class AbstractTransformerController implements TransformControll
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public void processTransform(final File sourceFile, final File targetFile,
|
||||
final String sourceMimetype, final String targetMimetype,
|
||||
final Map<String, String> transformOptions, final Long timeout)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(
|
||||
"Processing request with: sourceFile '{}', targetFile '{}', transformOptions" +
|
||||
" '{}', timeout {} ms", sourceFile, targetFile, transformOptions, timeout);
|
||||
}
|
||||
|
||||
String transformName = getTransformerName(sourceFile, sourceMimetype, targetMimetype, transformOptions);
|
||||
transform(transformName, sourceMimetype, targetMimetype, transformOptions, sourceFile, targetFile);
|
||||
}
|
||||
|
||||
private String getTransformerName(String sourceMimetype, String targetMimetype,
|
||||
String requestTransformName, File sourceFile,
|
||||
Map<String, String> transformOptions)
|
||||
{
|
||||
// Check if transformName was provided in the request (this can happen for ACS legacy transformers)
|
||||
String transformName = requestTransformName;
|
||||
if (transformName == null || transformName.isEmpty())
|
||||
{
|
||||
transformName = getTransformerName(sourceFile, sourceMimetype, targetMimetype, transformOptions);
|
||||
}
|
||||
else if (logger.isInfoEnabled())
|
||||
{
|
||||
logger.info("Using transform name provided in the request: " + requestTransformName);
|
||||
}
|
||||
return transformName;
|
||||
}
|
||||
|
||||
protected String getTransformerName(final File sourceFile, final String sourceMimetype,
|
||||
final String targetMimetype, final Map<String, String> transformOptions)
|
||||
{
|
||||
@@ -387,4 +483,7 @@ public abstract class AbstractTransformerController implements TransformControll
|
||||
}
|
||||
return transformOptions;
|
||||
}
|
||||
|
||||
protected abstract void transform(String transformName, String sourceMimetype, String targetMimetype,
|
||||
Map<String, String> transformOptions, File sourceFile, File targetFile);
|
||||
}
|
||||
|
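The hunks above add a multipart /transform endpoint to AbstractTransformerController and a getTransformOptions(...) helper that strips request parameters that are not transform options and drops empty values before the remaining map reaches the engine. A self-contained sketch of that filtering behaviour, kept separate from the real class (which has further abstract methods) and using the parameter names from NON_TRANSFORM_OPTION_REQUEST_PARAMETERS:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TransformOptionsFilterSketch
{
    // Mirrors NON_TRANSFORM_OPTION_REQUEST_PARAMETERS in AbstractTransformerController.
    private static final List<String> NON_TRANSFORM_OPTION_REQUEST_PARAMETERS = Arrays.asList(
        "sourceExtension", "targetExtension", "targetMimetype", "sourceMimetype", "testDelay", "transformName");

    static Map<String, String> getTransformOptions(Map<String, String> requestParameters)
    {
        Map<String, String> transformOptions = new HashMap<>(requestParameters);
        transformOptions.keySet().removeAll(NON_TRANSFORM_OPTION_REQUEST_PARAMETERS);
        transformOptions.values().removeIf(v -> v.isEmpty());
        return transformOptions;
    }

    public static void main(String[] args)
    {
        Map<String, String> params = new HashMap<>();
        params.put("targetExtension", "png");      // not a transform option, removed
        params.put("targetMimetype", "image/png"); // not a transform option, removed
        params.put("width", "100");                // kept
        params.put("allowEnlargement", "");        // empty value, dropped
        System.out.println(getTransformOptions(params)); // {width=100}
    }
}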
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Transform Core
 * %%
 * Copyright (C) 2005 - 2019 Alfresco Software Limited
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
@@ -26,18 +26,18 @@
 */
package org.alfresco.transformer.executors;

import org.alfresco.transformer.logging.LogEntry;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.alfresco.transformer.logging.LogEntry;

/**
 * Basic interface for executing transformations via Shell commands
 *
 * @author Cezar Leahu
 */
public interface CommandExecutor
public interface CommandExecutor extends Transformer
{
    void run(Map<String, String> properties, File targetFile, Long timeout);

@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Transform Core
 * %%
 * Copyright (C) 2005 - 2019 Alfresco Software Limited
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
@@ -28,14 +28,13 @@ package org.alfresco.transformer.executors;

import java.io.File;

import org.alfresco.transform.exceptions.TransformException;

/**
 * Basic interface for executing transformations inside Java/JVM
 * Basic interface for executing transformations inside Java/JVM.
 *
 * @author Cezar Leahu
 * @author adavis
 */
public interface JavaExecutor
public interface JavaExecutor extends Transformer
{
    void call(File sourceFile, File targetFile, String... args) throws TransformException;
    void call(File sourceFile, File targetFile, String... args) throws Exception;
}

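Both executor interfaces now extend the Transformer interface introduced below, and JavaExecutor#call may throw any Exception rather than only TransformException. A toy sketch of that contract, not code from the commit; it declares a local interface with the same call signature so it stays self-contained, where a real executor would invoke a Java library such as PDFBox or Tika:

import java.io.File;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

public class JavaExecutorSketch
{
    // Local stand-in with the same shape as the widened JavaExecutor#call contract.
    interface Executor
    {
        void call(File sourceFile, File targetFile, String... args) throws Exception;
    }

    public static void main(String[] args) throws Exception
    {
        // Trivial executor that copies the source to the target; any Exception it throws
        // is converted to a TransformException by the Transformer default methods.
        Executor copy = (sourceFile, targetFile, ignored) ->
            Files.copy(sourceFile.toPath(), targetFile.toPath(), StandardCopyOption.REPLACE_EXISTING);

        File source = File.createTempFile("source", ".txt");
        File target = File.createTempFile("target", ".txt");
        Files.writeString(source.toPath(), "hello");
        copy.call(source, target);
        System.out.println(Files.readString(target.toPath())); // hello
    }
}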
@@ -0,0 +1,124 @@
package org.alfresco.transformer.executors;

/*
 * #%L
 * Alfresco Transform Core
 * %%
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 * -
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * -
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 * -
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */

import org.alfresco.transform.exceptions.TransformException;

import java.io.File;
import java.util.Map;

import static org.alfresco.transformer.util.MimetypeMap.MIMETYPE_METADATA_EMBED;
import static org.alfresco.transformer.util.MimetypeMap.MIMETYPE_METADATA_EXTRACT;
import static org.alfresco.transformer.util.RequestParamMap.TRANSFORM_NAME_PARAMETER;
import static org.springframework.http.HttpStatus.BAD_REQUEST;
import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;

/**
 * Basic interface for executing transforms and metadata extract or embed actions.
 *
 * @author adavis
 */
public interface Transformer
{
    /**
     * @return A unique transformer id.
     */
    String getTransformerId();

    default void transform(String sourceMimetype, String targetMimetype, Map<String, String> transformOptions,
                           File sourceFile, File targetFile) throws TransformException
    {
        try
        {
            final String transformName = transformOptions.remove(TRANSFORM_NAME_PARAMETER);
            if (MIMETYPE_METADATA_EXTRACT.equals(targetMimetype))
            {
                extractMetadata(transformName, sourceMimetype, targetMimetype, transformOptions, sourceFile, targetFile);
            }
            else if (MIMETYPE_METADATA_EMBED.equals(targetMimetype))
            {
                embedMetadata(transformName, sourceMimetype, targetMimetype, transformOptions, sourceFile, targetFile);
            }
            else
            {
                transform(transformName, sourceMimetype, targetMimetype, transformOptions, sourceFile, targetFile);
            }
        }
        catch (TransformException e)
        {
            throw e;
        }
        catch (IllegalArgumentException e)
        {
            throw new TransformException(BAD_REQUEST.value(), getMessage(e));
        }
        catch (Exception e)
        {
            throw new TransformException(INTERNAL_SERVER_ERROR.value(), getMessage(e));
        }
        if (!targetFile.exists())
        {
            throw new TransformException(INTERNAL_SERVER_ERROR.value(),
                "Transformer failed to create an output file. Target file does not exist.");
        }
        if (sourceFile.length() > 0 && targetFile.length() == 0)
        {
            throw new TransformException(INTERNAL_SERVER_ERROR.value(),
                "Transformer failed to create an output file. Target file is empty but source file was not empty.");
        }
    }

    private static String getMessage(Exception e)
    {
        return e.getMessage() == null ? e.getClass().getSimpleName() : e.getMessage();
    }

    default void transform(String transformName, String sourceMimetype, String targetMimetype,
                           Map<String, String> transformOptions,
                           File sourceFile, File targetFile) throws Exception
    {
    }

    default void extractMetadata(String transformName, String sourceMimetype, String targetMimetype,
                                 Map<String, String> transformOptions,
                                 File sourceFile, File targetFile) throws Exception
    {
    }

    /**
     * @deprecated The content repository has no non-test embed metadata implementations.
     * This code exists in case there are custom implementations that need to be converted to T-Engines.
     * It is simply a copy and paste from the content repository and has received limited testing.
     */
    default void embedMetadata(String transformName, String sourceMimetype, String targetMimetype,
                               Map<String, String> transformOptions,
                               File sourceFile, File targetFile) throws Exception
    {
    }
}

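A minimal sketch of an engine-side implementation of this interface; the class name, behaviour and property key are illustrative assumptions, not part of the commit. The default transform(...) above routes a request whose target mimetype is alfresco-metadata-extract to extractMetadata and one targeting alfresco-metadata-embed to embedMetadata, so an implementation only overrides the hooks it supports:

package org.alfresco.transformer.executors;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical engine implementation, for illustration only.
public class ExampleTransformer implements Transformer
{
    private static final ObjectMapper jsonObjectMapper = new ObjectMapper();

    @Override
    public String getTransformerId()
    {
        return "example";
    }

    @Override
    public void transform(String transformName, String sourceMimetype, String targetMimetype,
                          Map<String, String> transformOptions, File sourceFile, File targetFile) throws Exception
    {
        // A real engine would call a library or an external command here.
        Files.copy(sourceFile.toPath(), targetFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }

    @Override
    public void extractMetadata(String transformName, String sourceMimetype, String targetMimetype,
                                Map<String, String> transformOptions, File sourceFile, File targetFile) throws Exception
    {
        // The repository expects the target file to contain a JSON map of properties;
        // the QName-style key below is illustrative.
        jsonObjectMapper.writeValue(targetFile,
            Map.of("{http://www.alfresco.org/model/content/1.0}title", "example"));
    }
}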
@@ -0,0 +1,499 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Transform Core
|
||||
* %%
|
||||
* Copyright (C) 2005-2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* -
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* -
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* -
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* -
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.transformer.metadataExtractors;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.slf4j.Logger;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Array;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
|
||||
/**
|
||||
* Helper methods for metadata extract and embed.
|
||||
* <p>
|
||||
* <i>Much of the code is based on AbstractMappingMetadataExtracter from the
|
||||
* content repository. The code has been simplified to only set up mapping one way.</i>
|
||||
* <p>
|
||||
* If a transform specifies that it can convert from {@code "<MIMETYPE>"} to {@code "alfresco-metadata-extract"}
|
||||
* (specified in the {@code engine_config.json}), it is indicating that it can extract metadata from {@code <MIMETYPE>}.
|
||||
*
|
||||
* The transform results in a Map of extracted properties encoded as json being returned to the content repository.
|
||||
* <ul>
|
||||
* <li>The content repository will use a transform in preference to any metadata extractors it might have defined
|
||||
* locally for the same MIMETYPE.</li>
|
||||
* <li>The T-Engine's Controller class will call a method in a class that extends {@link AbstractMetadataExtractor}
|
||||
* based on the source and target mediatypes in the normal way.</li>
|
||||
* <li>The method extracts ALL available metadata from the document and then calls
|
||||
* {@link #mapMetadataAndWrite(File, Map)}.</li>
|
||||
* <li>Selected values from the available metadata are mapped into content repository property names and values,
|
||||
* depending on what is defined in a {@code "<classname>_metadata_extract.properties"} file.</li>
|
||||
* <li>The selected values are set back to the content repository as a JSON representation of a Map, where the values
|
||||
* are applied to the source node.</li>
|
||||
* </ul>
|
||||
* To support the same functionality as metadata extractors configured inside the content repository,
|
||||
* extra key value pairs may be returned from {@link #extractMetadata}. These are:
|
||||
* <ul>
|
||||
* <li>{@code "sys:overwritePolicy"} which can specify the
|
||||
* {@code org.alfresco.repo.content.metadata.MetadataExtracter.OverwritePolicy} name. Defaults to "PRAGMATIC".</li>
|
||||
* <li>{@code "sys:enableStringTagging"} if {@code "true"} finds or creates tags for each string mapped to
|
||||
* {@code cm:taggable}. Defaults to {@code "false"} to ignore mapping strings to tags.</li>
|
||||
* <li>{@code "sys:carryAspectProperties"} </li>
|
||||
* <li>{@code "sys:stringTaggingSeparators"} </li>
|
||||
* </ul>
|
||||
*
|
||||
* If a transform specifies that it can convert from {@code "<MIMETYPE>"} to {@code "alfresco-metadata-embed"}, it is
|
||||
* indicating that it can embed metadata in {@code <MIMETYPE>}.
|
||||
*
|
||||
* The transform results in a new version of supplied source file that contains the metadata supplied in the transform
|
||||
* options.
|
||||
*
|
||||
* @author Jesper Steen Møller
|
||||
* @author Derek Hulley
|
||||
* @author adavis
|
||||
*/
|
||||
public abstract class AbstractMetadataExtractor
|
||||
{
|
||||
private static final String EXTRACT = "extract";
|
||||
private static final String EMBED = "embed";
|
||||
private static final String METADATA = "metadata";
|
||||
|
||||
private static final String NAMESPACE_PROPERTY_PREFIX = "namespace.prefix.";
|
||||
private static final char NAMESPACE_PREFIX = ':';
|
||||
private static final char NAMESPACE_BEGIN = '{';
|
||||
private static final char NAMESPACE_END = '}';
|
||||
|
||||
private static final List<String> SYS_PROPERTIES = Arrays.asList(
|
||||
"sys:overwritePolicy",
|
||||
"sys:enableStringTagging",
|
||||
"sys:carryAspectProperties",
|
||||
"sys:stringTaggingSeparators");
|
||||
|
||||
private static final ObjectMapper jsonObjectMapper = new ObjectMapper();
|
||||
|
||||
protected final Logger logger;
|
||||
private Map<String, Set<String>> extractMapping;
|
||||
private Map<String, Set<String>> embedMapping;
|
||||
|
||||
public AbstractMetadataExtractor(Logger logger)
|
||||
{
|
||||
this.logger = logger;
|
||||
extractMapping = Collections.emptyMap();
|
||||
embedMapping = Collections.emptyMap();
|
||||
try
|
||||
{
|
||||
extractMapping = buildExtractMapping();
|
||||
embedMapping = buildEmbedMapping();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
logger.error("Failed to read config", e);
|
||||
}
|
||||
}
|
||||
|
||||
public abstract Map<String, Serializable> extractMetadata(String sourceMimetype, Map<String, String> transformOptions,
|
||||
File sourceFile) throws Exception;
|
||||
|
||||
public void embedMetadata(String sourceMimetype, String targetMimetype, Map<String, String> transformOptions,
|
||||
File sourceFile, File targetFile) throws Exception
|
||||
{
|
||||
// Default nothing, as embedding is not supported in most cases
|
||||
}
|
||||
|
||||
protected Map<String, String> getMetadata(Map<String, String> transformOptions)
|
||||
{
|
||||
String metadataAsJson = transformOptions.get(METADATA);
|
||||
if (metadataAsJson == null)
|
||||
{
|
||||
throw new IllegalArgumentException("No metadata in embed request");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
TypeReference<HashMap<String, String>> typeRef = new TypeReference<HashMap<String, String>>() {};
|
||||
return jsonObjectMapper.readValue(metadataAsJson, typeRef);
|
||||
}
|
||||
catch (JsonProcessingException e)
|
||||
{
|
||||
throw new IllegalArgumentException("Failed to read metadata from request", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected Map<String, Set<String>> getExtractMapping()
|
||||
{
|
||||
return Collections.unmodifiableMap(extractMapping);
|
||||
}
|
||||
|
||||
public Map<String, Set<String>> getEmbedMapping()
|
||||
{
|
||||
return Collections.unmodifiableMap(embedMapping);
|
||||
}
|
||||
|
||||
/**
|
||||
* Based on AbstractMappingMetadataExtracter#getDefaultMapping.
|
||||
*
|
||||
* This method provides a <i>mapping</i> of where to store the values extracted from the documents. The list of
|
||||
* properties need <b>not</b> include all metadata values extracted from the document. This mapping should be
|
||||
* defined in a file based on the class name: {@code "<classname>_metadata_extract.properties"}
|
||||
* @return Returns a static mapping. It may not be null.
|
||||
*/
|
||||
private Map<String, Set<String>> buildExtractMapping()
|
||||
{
|
||||
String filename = getPropertiesFilename(EXTRACT);
|
||||
Properties properties = readProperties(filename);
|
||||
if (properties == null)
|
||||
{
|
||||
logger.error("Failed to read "+filename);
|
||||
}
|
||||
|
||||
Map<String, String> namespacesByPrefix = getNamespaces(properties);
|
||||
return buildExtractMapping(properties, namespacesByPrefix);
|
||||
}
|
||||
|
||||
private Map<String, Set<String>> buildExtractMapping(Properties properties, Map<String, String> namespacesByPrefix)
|
||||
{
|
||||
// Create the mapping
|
||||
Map<String, Set<String>> convertedMapping = new HashMap<>(17);
|
||||
for (Map.Entry<Object, Object> entry : properties.entrySet())
|
||||
{
|
||||
String documentProperty = (String) entry.getKey();
|
||||
String qnamesStr = (String) entry.getValue();
|
||||
if (documentProperty.startsWith(NAMESPACE_PROPERTY_PREFIX))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
// Create the entry
|
||||
Set<String> qnames = new HashSet<>(3);
|
||||
convertedMapping.put(documentProperty, qnames);
|
||||
// The to value can be a list of QNames
|
||||
StringTokenizer tokenizer = new StringTokenizer(qnamesStr, ",");
|
||||
while (tokenizer.hasMoreTokens())
|
||||
{
|
||||
String qnameStr = tokenizer.nextToken().trim();
|
||||
qnameStr = getQNameString(namespacesByPrefix, entry, qnameStr, EXTRACT);
|
||||
qnames.add(qnameStr);
|
||||
}
|
||||
if (logger.isTraceEnabled())
|
||||
{
|
||||
logger.trace("Added mapping from " + documentProperty + " to " + qnames);
|
||||
}
|
||||
}
|
||||
return convertedMapping;
|
||||
}
|
||||
|
||||
/**
|
||||
* Based on AbstractMappingMetadataExtracter#getDefaultEmbedMapping.
|
||||
*
|
||||
* This method provides a <i>mapping</i> of model properties that should be embedded in the content. The list of
|
||||
* properties need <b>not</b> include all properties. This mapping should be defined in a file based on the class
|
||||
* name: {@code "<classname>_metadata_embed.properties"}
|
||||
* <p>
|
||||
* If no {@code "<classname>_metadata_embed.properties"} file is found, a reverse of the
|
||||
* {@code "<classname>_metadata_extract.properties"} will be assumed. A last win approach will be used for handling
|
||||
* duplicates.
|
||||
* @return Returns a static mapping. It may not be null.
|
||||
*/
|
||||
private Map<String, Set<String>> buildEmbedMapping()
|
||||
{
|
||||
String filename = getPropertiesFilename(EMBED);
|
||||
Properties properties = readProperties(filename);
|
||||
|
||||
Map<String, Set<String>> embedMapping;
|
||||
if (properties != null)
|
||||
{
|
||||
Map<String, String> namespacesByPrefix = getNamespaces(properties);
|
||||
embedMapping = buildEmbedMapping(properties, namespacesByPrefix);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("No " + filename + ", assuming reverse of extract mapping");
|
||||
}
|
||||
embedMapping = buildEmbedMappingByReversingExtract();
|
||||
}
|
||||
return embedMapping;
|
||||
}
|
||||
|
||||
private Map<String, Set<String>> buildEmbedMapping(Properties properties, Map<String, String> namespacesByPrefix)
|
||||
{
|
||||
Map<String, Set<String>> convertedMapping = new HashMap<>(17);
|
||||
for (Map.Entry<Object, Object> entry : properties.entrySet())
|
||||
{
|
||||
String modelProperty = (String) entry.getKey();
|
||||
String metadataKeysString = (String) entry.getValue();
|
||||
if (modelProperty.startsWith(NAMESPACE_PROPERTY_PREFIX))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
modelProperty = getQNameString(namespacesByPrefix, entry, modelProperty, EMBED);
|
||||
String[] metadataKeysArray = metadataKeysString.split(",");
|
||||
Set<String> metadataKeys = new HashSet<String>(metadataKeysArray.length);
|
||||
for (String metadataKey : metadataKeysArray) {
|
||||
metadataKeys.add(metadataKey.trim());
|
||||
}
|
||||
// Create the entry
|
||||
convertedMapping.put(modelProperty, metadataKeys);
|
||||
if (logger.isTraceEnabled())
|
||||
{
|
||||
logger.trace("Added mapping from " + modelProperty + " to " + metadataKeysString);
|
||||
}
|
||||
}
|
||||
return convertedMapping;
|
||||
}
|
||||
|
||||
private Map<String, Set<String>> buildEmbedMappingByReversingExtract()
|
||||
{
|
||||
Map<String, Set<String>> extractMapping = buildExtractMapping();
|
||||
Map<String, Set<String>> embedMapping;
|
||||
embedMapping = new HashMap<>(extractMapping.size());
|
||||
for (String metadataKey : extractMapping.keySet())
|
||||
{
|
||||
if (extractMapping.get(metadataKey) != null && extractMapping.get(metadataKey).size() > 0)
|
||||
{
|
||||
String modelProperty = extractMapping.get(metadataKey).iterator().next();
|
||||
Set<String> metadataKeys = embedMapping.get(modelProperty);
|
||||
if (metadataKeys == null)
|
||||
{
|
||||
metadataKeys = new HashSet<String>(1);
|
||||
embedMapping.put(modelProperty, metadataKeys);
|
||||
}
|
||||
metadataKeys.add(metadataKey);
|
||||
if (logger.isTraceEnabled())
|
||||
{
|
||||
logger.trace("Added mapping from " + modelProperty + " to " + metadataKeys.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
return embedMapping;
|
||||
}
|
||||
|
||||
private String getPropertiesFilename(String suffix)
|
||||
{
|
||||
String className = this.getClass().getName();
|
||||
String shortClassName = className.split("\\.")[className.split("\\.").length - 1];
|
||||
shortClassName = shortClassName.replace('$', '-');
|
||||
|
||||
return shortClassName + "_metadata_" + suffix + ".properties";
|
||||
}
|
||||
|
||||
private Properties readProperties(String filename)
|
||||
{
|
||||
Properties properties = null;
|
||||
try
|
||||
{
|
||||
InputStream inputStream = AbstractMetadataExtractor.class.getClassLoader().getResourceAsStream(filename);
|
||||
if (inputStream != null)
|
||||
{
|
||||
properties = new Properties();
|
||||
properties.load(inputStream);
|
||||
}
|
||||
}
|
||||
catch (IOException ignore)
|
||||
{
|
||||
}
|
||||
return properties;
|
||||
}
|
||||
|
||||
private Map<String, String> getNamespaces(Properties properties)
|
||||
{
|
||||
Map<String, String> namespacesByPrefix = new HashMap<String, String>(5);
|
||||
for (Map.Entry<Object, Object> entry : properties.entrySet())
|
||||
{
|
||||
String propertyName = (String) entry.getKey();
|
||||
if (propertyName.startsWith(NAMESPACE_PROPERTY_PREFIX))
|
||||
{
|
||||
String prefix = propertyName.substring(17);
|
||||
String namespace = (String) entry.getValue();
|
||||
namespacesByPrefix.put(prefix, namespace);
|
||||
}
|
||||
}
|
||||
return namespacesByPrefix;
|
||||
}
|
||||
|
||||
private String getQNameString(Map<String, String> namespacesByPrefix, Map.Entry<Object, Object> entry, String qnameStr, String type)
|
||||
{
|
||||
// Check if we need to resolve a namespace reference
|
||||
int index = qnameStr.indexOf(NAMESPACE_PREFIX);
|
||||
if (index > -1 && qnameStr.charAt(0) != NAMESPACE_BEGIN)
|
||||
{
|
||||
String prefix = qnameStr.substring(0, index);
|
||||
String suffix = qnameStr.substring(index + 1);
|
||||
// It is prefixed
|
||||
String uri = namespacesByPrefix.get(prefix);
|
||||
if (uri == null)
|
||||
{
|
||||
throw new IllegalArgumentException("No prefix mapping for " + type + " property mapping: \n" +
|
||||
" Extractor: " + this + "\n" +
|
||||
" Mapping: " + entry);
|
||||
}
|
||||
qnameStr = NAMESPACE_BEGIN + uri + NAMESPACE_END + suffix;
|
||||
}
|
||||
return qnameStr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a value to the map, conserving null values. Values are converted to null if:
|
||||
* <ul>
|
||||
* <li>it is an empty string value after trimming</li>
|
||||
* <li>it is an empty collection</li>
|
||||
* <li>it is an empty array</li>
|
||||
* </ul>
|
||||
* String values are trimmed before being put into the map.
|
||||
* Otherwise, it is up to the extracter to ensure that the value is a <tt>Serializable</tt>.
|
||||
* It is not appropriate to implicitly convert values in order to make them <tt>Serializable</tt>
|
||||
* - the best conversion method will depend on the value's specific meaning.
|
||||
*
|
||||
* @param key the destination key
|
||||
* @param value the serializable value
|
||||
* @param destination the map to put values into
|
||||
* @return Returns <tt>true</tt> if set, otherwise <tt>false</tt>
|
||||
*/
|
||||
// Copied from the content repository's AbstractMappingMetadataExtracter.
|
||||
protected boolean putRawValue(String key, Serializable value, Map<String, Serializable> destination)
|
||||
{
|
||||
if (value == null)
|
||||
{
|
||||
// Just keep this
|
||||
}
|
||||
else if (value instanceof String)
|
||||
{
|
||||
String valueStr = ((String) value).trim();
|
||||
if (valueStr.length() == 0)
|
||||
{
|
||||
value = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (valueStr.indexOf("\u0000") != -1)
|
||||
{
|
||||
valueStr = valueStr.replaceAll("\u0000", "");
|
||||
}
|
||||
// Keep the trimmed value
|
||||
value = valueStr;
|
||||
}
|
||||
}
|
||||
else if (value instanceof Collection)
|
||||
{
|
||||
Collection<?> valueCollection = (Collection<?>) value;
|
||||
if (valueCollection.isEmpty())
|
||||
{
|
||||
value = null;
|
||||
}
|
||||
}
|
||||
else if (value.getClass().isArray())
|
||||
{
|
||||
if (Array.getLength(value) == 0)
|
||||
{
|
||||
value = null;
|
||||
}
|
||||
}
|
||||
// It passed all the tests
|
||||
destination.put(key, value);
|
||||
return true;
|
||||
}
|
||||
|
||||
public void mapMetadataAndWrite(File targetFile, Map<String, Serializable> metadata) throws IOException
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Raw metadata:");
|
||||
metadata.forEach((k,v) -> logger.debug(" "+k+"="+v));
|
||||
}
|
||||
|
||||
metadata = mapRawToSystem(metadata);
|
||||
writeMetadata(targetFile, metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* Based on AbstractMappingMetadataExtracter#mapRawToSystem.
|
||||
*
|
||||
* @param rawMetadata Metadata keyed by document properties
|
||||
* @return Returns the metadata keyed by the system properties
|
||||
*/
|
||||
private Map<String, Serializable> mapRawToSystem(Map<String, Serializable> rawMetadata)
|
||||
{
|
||||
boolean debugEnabled = logger.isDebugEnabled();
|
||||
if (debugEnabled)
|
||||
{
|
||||
logger.debug("Returned metadata:");
|
||||
}
|
||||
Map<String, Serializable> systemProperties = new HashMap<String, Serializable>(rawMetadata.size() * 2 + 1);
|
||||
for (Map.Entry<String, Serializable> entry : rawMetadata.entrySet())
|
||||
{
|
||||
String documentKey = entry.getKey();
|
||||
Serializable documentValue = entry.getValue();
|
||||
if (SYS_PROPERTIES.contains(documentKey))
|
||||
{
|
||||
systemProperties.put(documentKey, documentValue);
|
||||
if (debugEnabled)
|
||||
{
|
||||
logger.debug(" " + documentKey + "=" + documentValue);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// Check if there is a mapping for this
|
||||
if (!extractMapping.containsKey(documentKey))
|
||||
{
|
||||
// No mapping - ignore
|
||||
continue;
|
||||
}
|
||||
|
||||
Set<String> systemQNames = extractMapping.get(documentKey);
|
||||
for (String systemQName : systemQNames)
|
||||
{
|
||||
if (debugEnabled)
|
||||
{
|
||||
logger.debug(" "+systemQName+"="+documentValue+" ("+documentKey+")");
|
||||
}
|
||||
systemProperties.put(systemQName, documentValue);
|
||||
}
|
||||
}
|
||||
return systemProperties;
|
||||
}
|
||||
|
||||
private void writeMetadata(File targetFile, Map<String, Serializable> results)
|
||||
throws IOException
|
||||
{
|
||||
jsonObjectMapper.writeValue(targetFile, results);
|
||||
}
|
||||
}
|
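A sketch of a concrete extractor built on this base class; the class name, property names and mapping file contents are illustrative assumptions, not code from the commit. The base class loads <classname>_metadata_extract.properties from the classpath, maps the raw keys returned by extractMetadata to content-model QNames, and mapMetadataAndWrite writes the mapped values to the target file as JSON:

package org.alfresco.transformer.metadataExtractors;

import java.io.File;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical extractor. ExampleMetadataExtractor_metadata_extract.properties on the
// classpath would contain lines such as:
//   namespace.prefix.cm=http://www.alfresco.org/model/content/1.0
//   author=cm:author
//   title=cm:title
public class ExampleMetadataExtractor extends AbstractMetadataExtractor
{
    private static final Logger logger = LoggerFactory.getLogger(ExampleMetadataExtractor.class);

    public ExampleMetadataExtractor()
    {
        super(logger);
    }

    @Override
    public Map<String, Serializable> extractMetadata(String sourceMimetype, Map<String, String> transformOptions,
                                                     File sourceFile) throws Exception
    {
        // A real extractor would parse sourceFile with a library such as PDFBox or Tika.
        Map<String, Serializable> rawMetadata = new HashMap<>();
        putRawValue("author", "Jane Example", rawMetadata);
        putRawValue("title", "Example document", rawMetadata);
        return rawMetadata;
    }
}

An engine's Transformer implementation would call extractMetadata and then mapMetadataAndWrite(targetFile, metadata) to produce the JSON the repository applies to the node.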
@@ -2,7 +2,7 @@
 * #%L
 * Alfresco Transform Core
 * %%
 * Copyright (C) 2005 - 2019 Alfresco Software Limited
 * Copyright (C) 2005 - 2020 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * -
@@ -31,6 +31,9 @@ package org.alfresco.transformer.util;
 */
public interface MimetypeMap
{
    String MIMETYPE_METADATA_EXTRACT = "alfresco-metadata-extract";
    String MIMETYPE_METADATA_EMBED = "alfresco-metadata-embed";

    String PREFIX_APPLICATION = "application/";
    String PREFIX_AUDIO = "audio/";
    String PREFIX_IMAGE = "image/";

@@ -26,13 +26,25 @@
 */
package org.alfresco.transformer.util;

public interface RequestParamMap
public interface RequestParamMap
{
    // This property can be sent by acs repository's legacy transformers to force a transform,
    // instead of letting this T-Engine determine it based on the request parameters.
    // This allows clients to specify transform names as they appear in the engine config files, for example:
    // imagemagick, libreoffice, PdfBox, TikaAuto, ....
    // See ATS-731.
    @Deprecated
    String TRANSFORM_NAME_PROPERTY = "transformName";

    String TRANSFORM_NAME_PARAMETER = "alfresco.transform-name-parameter";
    String FILE = "file";

    String SOURCE_ENCODING = "sourceEncoding";
    String SOURCE_EXTENSION = "sourceExtension";
    String SOURCE_MIMETYPE = "sourceMimetype";
    String TARGET_EXTENSION = "targetExtension";
    String TARGET_MIMETYPE = "targetMimetype";
    String TARGET_ENCODING = "targetEncoding";
    String TEST_DELAY = "testDelay";
    String PAGE_REQUEST_PARAM = "page";
    String WIDTH_REQUEST_PARAM = "width";
@@ -56,5 +68,8 @@ public interface RequestParamMap
    String ALLOW_ENLARGEMENT = "allowEnlargement";
    String MAINTAIN_ASPECT_RATIO = "maintainAspectRatio";
    String COMMAND_OPTIONS = "commandOptions";
    String TIMEOUT = "timeOut";
    String TIMEOUT = "timeout";
    String INCLUDE_CONTENTS = "includeContents";
    String NOT_EXTRACT_BOOKMARK_TEXT = "notExtractBookmarksText";
    String PAGE_LIMIT = "pageLimit";
}

@@ -0,0 +1,119 @@
|
||||
/*
|
||||
* #%L
|
||||
* Alfresco Transform Core
|
||||
* %%
|
||||
* Copyright (C) 2005 - 2020 Alfresco Software Limited
|
||||
* %%
|
||||
* This file is part of the Alfresco software.
|
||||
* -
|
||||
* If the software was purchased under a paid Alfresco license, the terms of
|
||||
* the paid license agreement will prevail. Otherwise, the software is
|
||||
* provided under the following open source license terms:
|
||||
* -
|
||||
* Alfresco is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Lesser General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
* -
|
||||
* Alfresco is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Lesser General Public License for more details.
|
||||
* -
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
|
||||
* #L%
|
||||
*/
|
||||
package org.alfresco.transformer;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.junit.Test;
|
||||
import org.springframework.core.io.Resource;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.text.MessageFormat.format;
|
||||
import static org.alfresco.transformer.EngineClient.sendTRequest;
|
||||
import static org.alfresco.transformer.util.MimetypeMap.MIMETYPE_METADATA_EXTRACT;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
import static org.springframework.http.HttpStatus.OK;
|
||||
|
||||
/**
|
||||
* Super class of metadata integration tests. Sub classes should add the list of test files to
|
||||
* {@code @Parameterized.Parameters public static Set<TestFileInfo> engineTransformations()} and provide
|
||||
* expected json files (<sourceFilename>_metadata.json) as resources on the classpath.
|
||||
*
|
||||
* @author adavis
|
||||
*/
|
||||
public abstract class AbstractMetadataExtractsIT
|
||||
{
|
||||
private static final String ENGINE_URL = "http://localhost:8090";
|
||||
// These are normally variable, hence the lowercase.
|
||||
private static final String targetMimetype = MIMETYPE_METADATA_EXTRACT;
|
||||
private static final String targetExtension = "json";
|
||||
protected final String sourceMimetype;
|
||||
protected final String sourceFile;
|
||||
private final ObjectMapper jsonObjectMapper = new ObjectMapper();
|
||||
|
||||
public AbstractMetadataExtractsIT(TestFileInfo testFileInfo)
|
||||
{
|
||||
sourceMimetype = testFileInfo.getMimeType();
|
||||
sourceFile = testFileInfo.getPath();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTransformation()
|
||||
{
|
||||
final String descriptor = format("Transform ({0}, {1} -> {2}, {3})",
|
||||
sourceFile, sourceMimetype, targetMimetype, targetExtension);
|
||||
|
||||
try
|
||||
{
|
||||
final ResponseEntity<Resource> response = sendTRequest(ENGINE_URL, sourceFile,
|
||||
sourceMimetype, targetMimetype, targetExtension);
|
||||
assertEquals(descriptor, OK, response.getStatusCode());
|
||||
|
||||
String metadataFilename = sourceFile + "_metadata.json";
|
||||
Map<String, Serializable> actualMetadata = readMetadata(response.getBody().getInputStream());
|
||||
File actualMetadataFile = new File(metadataFilename);
|
||||
jsonObjectMapper.writerWithDefaultPrettyPrinter().writeValue(actualMetadataFile, actualMetadata);
|
||||
|
||||
Map<String, Serializable> expectedMetadata = readExpectedMetadata(metadataFilename, actualMetadataFile);
|
||||
assertEquals("The metadata did not match the expected value. It has been saved in "+actualMetadataFile.getAbsolutePath(),
|
||||
expectedMetadata, actualMetadata);
|
||||
actualMetadataFile.delete();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
fail(descriptor + " exception: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private Map<String, Serializable> readExpectedMetadata(String filename, File actualMetadataFile) throws IOException
|
||||
{
|
||||
try (InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream(filename))
|
||||
{
|
||||
if (inputStream == null)
|
||||
{
|
||||
fail("The expected metadata file "+filename+" did not exist.\n"+
|
||||
"The actual metadata has been saved in "+actualMetadataFile.getAbsoluteFile());
|
||||
}
|
||||
return readMetadata(inputStream);
|
||||
}
|
||||
}
|
||||
|
||||
private Map<String, Serializable> readMetadata(InputStream inputStream) throws IOException
|
||||
{
|
||||
TypeReference<HashMap<String, Serializable>> typeRef = new TypeReference<HashMap<String, Serializable>>() {};
|
||||
return jsonObjectMapper.readValue(inputStream, typeRef);
|
||||
}
|
||||
}
|
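A sketch of how an engine module might subclass this integration test. TestFileInfo's construction is not shown in this commit, so the testFile factory method below is an assumption, as is the package of the subclass; each listed source file needs a matching <sourceFilename>_metadata.json resource containing the expected extraction result:

package org.alfresco.transformer;

import java.util.Set;

import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

// Hypothetical example; TestFileInfo.testFile(mimetype, extension, path) is an assumed factory.
@RunWith(Parameterized.class)
public class ExampleMetadataExtractsIT extends AbstractMetadataExtractsIT
{
    public ExampleMetadataExtractsIT(TestFileInfo testFileInfo)
    {
        super(testFileInfo);
    }

    @Parameterized.Parameters
    public static Set<TestFileInfo> engineTransformations()
    {
        // Expected results live in quick.pdf_metadata.json and quick.docx_metadata.json
        // on the test classpath.
        return Set.of(
            TestFileInfo.testFile("application/pdf", "pdf", "quick.pdf"),
            TestFileInfo.testFile("application/vnd.openxmlformats-officedocument.wordprocessingml.document",
                "docx", "quick.docx"));
    }
}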