Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-08-07 17:49:17 +00:00)
REPO-3681 Renditions: Record HeartBeat data from rendition V1 REST API (#76)
@@ -0,0 +1,139 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2018 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.heartbeat;

import org.alfresco.heartbeat.datasender.HBData;
import org.alfresco.heartbeat.jobs.HeartBeatJobScheduler;
import org.alfresco.repo.descriptor.DescriptorDAO;
import org.alfresco.repo.thumbnail.ThumbnailDefinition;
import org.alfresco.util.PropertyCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;

import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * This class collects rendition request counts for HeartBeat. A rendition (such as "doclib") always has the same
 * target mimetype, but may be requested for different source mimetypes. As a result there may be multiple sets of
 * data for the same rendition, and it is likely that multiple renditions will be reported in the same batch of data.
 * <ul>
 *     <li>Collector ID: <b>acs.repository.renditions</b></li>
 *     <li>Data:
 *         <ul>
 *             <li><b>rendition:</b> String - The name of the rendition.</li>
 *             <li><b>count:</b> Integer - The number of times a rendition and sourceMimetype combination has been requested.</li>
 *             <li><b>sourceMimetype:</b> String - The source mimetype for the rendition.</li>
 *             <li><b>targetMimetype:</b> String - The target mimetype for the rendition.</li>
 *         </ul>
 *     </li>
 * </ul>
 *
 * @author adavis
 */
public class RenditionsDataCollector extends HBBaseDataCollector implements InitializingBean
{
    private static final Log logger = LogFactory.getLog(RenditionsDataCollector.class);

    private DescriptorDAO currentRepoDescriptorDAO;

    // Map keyed on rendition (ThumbnailDefinition) to a Map keyed on source mimetype, holding the number of times
    // that rendition and source mimetype combination has been requested.
    private final Map<ThumbnailDefinition, Map<String, AtomicInteger>> renditionRequests = new ConcurrentHashMap<>();

    public RenditionsDataCollector(String collectorId, String collectorVersion, String cronExpression,
                                   HeartBeatJobScheduler hbJobScheduler)
    {
        super(collectorId, collectorVersion, cronExpression, hbJobScheduler);
    }

    public void setCurrentRepoDescriptorDAO(DescriptorDAO currentRepoDescriptorDAO)
    {
        this.currentRepoDescriptorDAO = currentRepoDescriptorDAO;
    }

    @Override
    public void afterPropertiesSet() throws Exception
    {
        PropertyCheck.mandatory(this, "currentRepoDescriptorDAO", currentRepoDescriptorDAO);
    }

    public void recordRenditionRequest(ThumbnailDefinition rendition, String sourceMimetype)
    {
        // Increment the count for the rendition and source mimetype, atomically creating any missing parts
        // of the nested Map structure.
        renditionRequests.computeIfAbsent(rendition,
                k -> new ConcurrentHashMap<>()).computeIfAbsent(sourceMimetype,
                k -> new AtomicInteger()).incrementAndGet();
    }

    @Override
    public List<HBData> collectData()
    {
        List<HBData> collectedData = new LinkedList<>();

        String systemId = this.currentRepoDescriptorDAO.getDescriptor().getId();
        String collectorId = this.getCollectorId();
        String collectorVersion = this.getCollectorVersion();
        Date timestamp = new Date();

        // We don't mind if new renditions are added while we iterate, as we will pick them up next time.
        for (ThumbnailDefinition rendition : renditionRequests.keySet())
        {
            String renditionName = rendition.getName();
            String targetMimetype = rendition.getMimetype();
            for (Map.Entry<String, AtomicInteger> entry : renditionRequests.remove(rendition).entrySet())
            {
                String sourceMimetype = entry.getKey();
                AtomicInteger count = entry.getValue();

                Map<String, Object> values = new HashMap<>();
                values.put("rendition", renditionName);
                values.put("count", count.intValue());
                values.put("sourceMimetype", sourceMimetype);
                values.put("targetMimetype", targetMimetype);

                // The data is flattened (denormalized) at this point so that results from different nodes and days
                // can simply be combined in Kibana. Different nodes are likely to have different sets of
                // sourceMimetypes, which would make summing the counts harder if there were a single entry per
                // rendition with a nested structure for each sourceMimetype.
                collectedData.add(new HBData(systemId, collectorId, collectorVersion, timestamp, values));

                if (logger.isDebugEnabled())
                {
                    logger.debug(renditionName + " " + count + " " + sourceMimetype + " " + targetMimetype);
                }
            }
        }

        return collectedData;
    }
}
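The nested-map counting idiom in recordRenditionRequest above deserves a note: chaining two computeIfAbsent calls on ConcurrentHashMap, ending in an AtomicInteger, lets any number of request threads bump a per-rendition, per-source-mimetype counter without explicit locking. The stand-alone sketch below (not part of the commit; the class name and mimetype strings are illustrative) reproduces the same pattern and prints per-combination maps similar in shape to the flattened values that collectData() emits:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicInteger;

    public class RenditionCountingSketch
    {
        // rendition name -> source mimetype -> request count
        private static final Map<String, Map<String, AtomicInteger>> requests = new ConcurrentHashMap<>();

        public static void record(String rendition, String sourceMimetype)
        {
            // Both computeIfAbsent calls are atomic, so concurrent callers never lose an increment.
            requests.computeIfAbsent(rendition, k -> new ConcurrentHashMap<>())
                    .computeIfAbsent(sourceMimetype, k -> new AtomicInteger())
                    .incrementAndGet();
        }

        public static void main(String[] args)
        {
            record("doclib", "application/vnd.ms-excel");
            record("doclib", "application/vnd.ms-excel");
            record("preview", "application/msword");

            // One flat map per rendition/sourceMimetype pair, similar to the values sent to HeartBeat.
            for (Map.Entry<String, Map<String, AtomicInteger>> r : requests.entrySet())
            {
                for (Map.Entry<String, AtomicInteger> s : r.getValue().entrySet())
                {
                    Map<String, Object> values = new HashMap<>();
                    values.put("rendition", r.getKey());
                    values.put("count", s.getValue().intValue());
                    values.put("sourceMimetype", s.getKey());
                    System.out.println(values);
                }
            }
        }
    }

Because collectData() removes each rendition entry as it iterates, counts recorded after a collection run simply start a fresh map and are picked up in the next batch.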
@@ -26,6 +26,7 @@
 
 package org.alfresco.rest.api.impl;
 
+import org.alfresco.heartbeat.RenditionsDataCollector;
 import org.alfresco.model.ContentModel;
 import org.alfresco.query.PagingResults;
 import org.alfresco.repo.tenant.TenantService;

@@ -106,6 +107,7 @@ public class RenditionsImpl implements Renditions, ResourceLoaderAware
     private ServiceRegistry serviceRegistry;
     private ResourceLoader resourceLoader;
     private TenantService tenantService;
+    private RenditionsDataCollector renditionsDataCollector;
 
     public void setNodes(Nodes nodes)
     {

@@ -138,6 +140,11 @@ public class RenditionsImpl implements Renditions, ResourceLoaderAware
         this.tenantService = tenantService;
     }
 
+    public void setRenditionsDataCollector(RenditionsDataCollector renditionsDataCollector)
+    {
+        this.renditionsDataCollector = renditionsDataCollector;
+    }
+
     public void init()
     {
         PropertyCheck.mandatory(this, "nodes", nodes);

@@ -145,6 +152,7 @@ public class RenditionsImpl implements Renditions, ResourceLoaderAware
         PropertyCheck.mandatory(this, "scriptThumbnailService", scriptThumbnailService);
         PropertyCheck.mandatory(this, "serviceRegistry", serviceRegistry);
         PropertyCheck.mandatory(this, "tenantService", tenantService);
+        PropertyCheck.mandatory(this, "renditionsDataCollector", renditionsDataCollector);
 
         this.nodeService = serviceRegistry.getNodeService();
         this.actionService = serviceRegistry.getActionService();

@@ -291,14 +299,16 @@ public class RenditionsImpl implements Renditions, ResourceLoaderAware
 
         ContentData contentData = getContentData(sourceNodeRef, true);
         // Check if anything is currently available to generate thumbnails for the specified mimeType
-        if (!registry.isThumbnailDefinitionAvailable(contentData.getContentUrl(), contentData.getMimetype(), contentData.getSize(), sourceNodeRef,
+        String sourceMimetype = contentData.getMimetype();
+        if (!registry.isThumbnailDefinitionAvailable(contentData.getContentUrl(), sourceMimetype, contentData.getSize(), sourceNodeRef,
                 thumbnailDefinition))
         {
             throw new InvalidArgumentException("Unable to create thumbnail '" + thumbnailDefinition.getName() + "' for " +
-                    contentData.getMimetype() + " as no transformer is currently available.");
+                    sourceMimetype + " as no transformer is currently available.");
         }
 
         Action action = ThumbnailHelper.createCreateThumbnailAction(thumbnailDefinition, serviceRegistry);
+        renditionsDataCollector.recordRenditionRequest(thumbnailDefinition, sourceMimetype);
 
         // Create thumbnail - or else queue for async creation
         actionService.executeAction(action, sourceNodeRef, true, executeAsync);
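The recordRenditionRequest call added above runs when a client asks for a rendition through the v1 REST API. As a rough illustration, a rendition such as doclib can be requested with a POST to the nodes/{nodeId}/renditions endpoint; the sketch below assumes a local repository at localhost:8080, default admin credentials, and a placeholder nodeId (all illustrative values):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class RequestRenditionExample
    {
        public static void main(String[] args) throws Exception
        {
            // Placeholders: adjust host, credentials and nodeId for a real repository.
            String nodeId = "ffffffff-ffff-ffff-ffff-ffffffffffff";
            String url = "http://localhost:8080/alfresco/api/-default-/public/alfresco/versions/1/nodes/"
                    + nodeId + "/renditions";
            String auth = Base64.getEncoder().encodeToString("admin:admin".getBytes(StandardCharsets.UTF_8));

            HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                    .header("Authorization", "Basic " + auth)
                    .header("Content-Type", "application/json")
                    .POST(HttpRequest.BodyPublishers.ofString("{\"id\": \"doclib\"}"))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            // Typically an acceptance status, since the rendition is produced asynchronously.
            System.out.println(response.statusCode());
        }
    }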
@@ -1362,6 +1362,7 @@
         <property name="scriptThumbnailService" ref="thumbnailServiceScript" />
         <property name="serviceRegistry" ref="ServiceRegistry" />
         <property name="tenantService" ref="tenantService"/>
+        <property name="renditionsDataCollector" ref="renditionsDataCollector"/>
     </bean>
 
     <bean id="Renditions" class="org.springframework.aop.framework.ProxyFactoryBean">

@@ -1390,6 +1391,15 @@
         <property name="deletedNodes" ref="DeletedNodes"/>
     </bean>
 
+    <!-- HeartBeat rest api renditions data collector -->
+    <bean id="renditionsDataCollector" class="org.alfresco.heartbeat.RenditionsDataCollector" parent="hbBaseDataCollector">
+        <property name="hbJobScheduler" ref="hbNonLockingJobScheduler"/> <!-- Schedule a non-locking job for this collector, so we gather data from each node -->
+        <constructor-arg name="collectorId" value="acs.repository.renditions"/> <!-- Collector ID -->
+        <constructor-arg name="collectorVersion" value="1.0"/> <!-- Collector version -->
+        <constructor-arg name="cronExpression" value="0 0 0 ? * *"/> <!-- Daily -->
+        <property name="currentRepoDescriptorDAO" ref="currentRepoDescriptorDAO"/>
+    </bean>
+
     <!-- -->
     <!-- Authentications REST API -->
     <!-- -->
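The cronExpression value above, 0 0 0 ? * *, is a Quartz-style expression (seconds, minutes, hours, day-of-month, month, day-of-week) that fires once a day at midnight, matching the Daily comment. A quick way to sanity-check such an expression is sketched below; it assumes the Quartz library (org.quartz) is available on the classpath, and the class name is illustrative:

    import java.text.ParseException;
    import java.util.Date;

    import org.quartz.CronExpression;

    public class CronExpressionCheck
    {
        public static void main(String[] args) throws ParseException
        {
            // Quartz cron fields: seconds minutes hours day-of-month month day-of-week
            CronExpression daily = new CronExpression("0 0 0 ? * *");
            System.out.println("Valid: " + CronExpression.isValidExpression("0 0 0 ? * *"));
            System.out.println("Next fire time: " + daily.getNextValidTimeAfter(new Date()));
        }
    }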
@@ -50,6 +50,7 @@ import org.junit.runners.Suite;
         org.alfresco.rest.api.tests.TestTags.class,
         org.alfresco.rest.api.tests.SharedLinkApiTest.class,
         org.alfresco.rest.api.tests.RenditionsTest.class,
+        org.alfresco.heatbeat.RenditionsDataCollectorTest.class,
         org.alfresco.rest.api.tests.TestPeople.class,
         org.alfresco.rest.api.tests.ProbeApiTest.class,
 })
@@ -0,0 +1,201 @@
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2018 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.heatbeat;

import com.sun.management.OperatingSystemMXBean;
import com.sun.management.UnixOperatingSystemMXBean;
import org.alfresco.heartbeat.RenditionsDataCollector;
import org.alfresco.heartbeat.datasender.HBData;
import org.alfresco.heartbeat.jobs.HeartBeatJobScheduler;
import org.alfresco.repo.descriptor.DescriptorDAO;
import org.alfresco.repo.thumbnail.ThumbnailDefinition;
import org.alfresco.service.cmr.repository.HBDataCollectorService;
import org.alfresco.service.cmr.repository.TransformationOptions;
import org.alfresco.service.descriptor.Descriptor;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import java.lang.management.ManagementFactory;
import java.util.Date;
import java.util.List;
import java.util.Map;

import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests that the RenditionsDataCollector collects the correct data.
 */
public class RenditionsDataCollectorTest
{
    private RenditionsDataCollector renditionsDataCollector;
    private HBDataCollectorService mockCollectorService;
    private DescriptorDAO mockDescriptorDAO;
    private List<HBData> collectedData;
    private HeartBeatJobScheduler mockScheduler;

    private TransformationOptions options = new TransformationOptions();
    private ThumbnailDefinition doclib = new ThumbnailDefinition("png", options, "doclib");
    private ThumbnailDefinition preview = new ThumbnailDefinition("pdf", options, "preview");

    @Before
    public void setUp()
    {
        mockDescriptorDAO = mock(DescriptorDAO.class);
        mockCollectorService = mock(HBDataCollectorService.class);
        mockScheduler = mock(HeartBeatJobScheduler.class);

        Descriptor mockDescriptor = mock(Descriptor.class);
        when(mockDescriptor.getId()).thenReturn("mock_id");
        when(mockDescriptorDAO.getDescriptor()).thenReturn(mockDescriptor);

        renditionsDataCollector = new RenditionsDataCollector("acs.repository.renditions", "1.0", "0 0 0 ? * *", mockScheduler);
        renditionsDataCollector.setHbDataCollectorService(mockCollectorService);
        renditionsDataCollector.setCurrentRepoDescriptorDAO(mockDescriptorDAO);
    }

    @Test
    public void testHBDataFields()
    {
        // Record 2 renditions
        renditionsDataCollector.recordRenditionRequest(preview, "docx");
        renditionsDataCollector.recordRenditionRequest(doclib, "docx");
        collectedData = renditionsDataCollector.collectData();

        for (HBData data : this.collectedData)
        {
            assertNotNull(data.getCollectorId());
            assertNotNull(data.getCollectorVersion());
            assertNotNull(data.getSchemaVersion());
            assertNotNull(data.getSystemId());
            assertNotNull(data.getTimestamp());
        }
    }

    @Test
    public void testCollectedDataInDetail()
    {
        // Record an initial batch of 4 renditions
        renditionsDataCollector.recordRenditionRequest(doclib, "xls");
        renditionsDataCollector.recordRenditionRequest(doclib, "xls");
        renditionsDataCollector.recordRenditionRequest(preview, "docx");
        renditionsDataCollector.recordRenditionRequest(doclib, "docx");
        collectedData = renditionsDataCollector.collectData();

        assertEquals("There should have been 3 data elements", 3, collectedData.size());

        Date firstTimestamp = null;
        for (HBData data : collectedData)
        {
            if (firstTimestamp == null)
            {
                firstTimestamp = data.getTimestamp();
            }
            else
            {
                assertEquals("All data in a batch should have the same timestamp", firstTimestamp, data.getTimestamp());
            }

            Map<String, Object> values = data.getData();
            assertEquals("There should have been 4 mapped values", 4, values.size());

            String rendition = (String) values.get("rendition");
            String sourceMimetype = (String) values.get("sourceMimetype");
            String targetMimetype = (String) values.get("targetMimetype");
            Integer count = (Integer) values.get("count");

            assertNotNull(rendition);
            assertNotNull(sourceMimetype);
            assertNotNull(targetMimetype);
            assertNotNull(count);
        }

        assertHBDataContains("doclib", "xls", "png", 2);
        assertHBDataContains("doclib", "docx", "png", 1);
        assertHBDataContains("preview", "docx", "pdf", 1);
    }

    @Test
    public void testMultipleCollections() throws InterruptedException
    {
        // A batch of 0 renditions
        collectedData = renditionsDataCollector.collectData();
        assertEquals("There should have been 0 data elements", 0, collectedData.size());

        // Record a batch of 4 renditions
        renditionsDataCollector.recordRenditionRequest(doclib, "xls");
        renditionsDataCollector.recordRenditionRequest(doclib, "xls");
        renditionsDataCollector.recordRenditionRequest(preview, "docx");
        renditionsDataCollector.recordRenditionRequest(doclib, "docx");
        collectedData = renditionsDataCollector.collectData();
        assertEquals("There should have been 3 data elements", 3, collectedData.size());
        assertHBDataContains("doclib", "xls", "png", 2);
        assertHBDataContains("doclib", "docx", "png", 1);
        assertHBDataContains("preview", "docx", "pdf", 1);
        Date prevTimestamp = collectedData.get(0).getTimestamp();
        Thread.sleep(10);

        // A batch of 3 renditions
        renditionsDataCollector.recordRenditionRequest(doclib, "jpg");
        renditionsDataCollector.recordRenditionRequest(doclib, "jpg");
        renditionsDataCollector.recordRenditionRequest(doclib, "jpg");
        collectedData = renditionsDataCollector.collectData();
        assertEquals("There should have been 1 data element", 1, collectedData.size());
        assertHBDataContains("doclib", "jpg", "png", 3);
        assertNotEquals("The timestamp should have changed", prevTimestamp, collectedData.get(0).getTimestamp());

        // A batch of 0 renditions
        collectedData = renditionsDataCollector.collectData();
        assertEquals("There should have been 0 data elements", 0, collectedData.size());

        // A batch of 1 rendition
        renditionsDataCollector.recordRenditionRequest(doclib, "xls");
        collectedData = renditionsDataCollector.collectData();
        assertEquals("There should have been 1 data element", 1, collectedData.size());
        assertHBDataContains("doclib", "xls", "png", 1);
    }

    private void assertHBDataContains(String rendition, String sourceMimetype, String targetMimetype, int count)
    {
        boolean found = false;
        for (HBData data : collectedData)
        {
            Map<String, Object> values = data.getData();

            if (rendition.equals(values.get("rendition")) &&
                sourceMimetype.equals(values.get("sourceMimetype")) &&
                targetMimetype.equals(values.get("targetMimetype")) &&
                count == ((Integer) values.get("count")).intValue())
            {
                found = true;
                break;
            }
        }
        // The collected data must contain a matching entry, otherwise the test fails.
        assertTrue("Expected collected data to contain: " + rendition + " " + sourceMimetype + " " +
                targetMimetype + " " + count, found);
    }
}