Mirror of https://github.com/Alfresco/SearchServices.git (synced 2025-09-24 14:31:29 +00:00)
SEARCH-146: Moving handler methods again and removing duplicate code
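For orientation: the diff below moves two private helpers out of AlfrescoCoreAdminHandler. openResource becomes a static method on HandlerOfResources keyed on the Solr home path, and addCoreSummary becomes a static method on HandlerReportBuilder that receives the TrackerRegistry explicitly. The sketch that follows shows how the relocated helpers end up being called; the wrapper class, method names and example argument values are illustrative assumptions, only the two helper signatures come from the diff itself.

    package org.alfresco.solr;

    import java.io.IOException;
    import java.io.InputStream;
    import org.alfresco.solr.tracker.TrackerRegistry;
    import org.apache.solr.common.util.NamedList;
    import org.apache.solr.core.CoreContainer;

    // Illustrative call sites only; not part of the commit.
    class RelocatedHelperCallSites
    {
        InputStream loadResource(CoreContainer coreContainer, String resource)
        {
            // Was: the private AlfrescoCoreAdminHandler.openResource(CoreContainer, String)
            return HandlerOfResources.openResource(coreContainer.getSolrHome(), resource);
        }

        void summarise(TrackerRegistry trackerRegistry, String coreName, InformationServer srv,
                       NamedList<Object> report) throws IOException
        {
            // Was: the private AlfrescoCoreAdminHandler.addCoreSummary(coreName, ...)
            HandlerReportBuilder.addCoreSummary(trackerRegistry, coreName, false, false, false, srv, report);
        }
    }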
@@ -149,7 +149,7 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
{
Class<?> clazz = Class.forName("org.apache.log4j.PropertyConfigurator");
Method method = clazz.getMethod("configure", Properties.class);
InputStream is = openResource(coreContainer, resource);
InputStream is = openResource(coreContainer.getSolrHome(), resource);
Properties p = new Properties();
p.load(is);
method.invoke(null, p);
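Note on the hunk above: the handler loads the log4j configuration reflectively, and only the openResource argument changes in this commit. With log4j 1.x on the classpath the sequence is equivalent to the direct call in the sketch below; the wrapper class and the exception handling are assumptions for readability, not part of the commit.

    package org.alfresco.solr;

    import java.io.InputStream;
    import java.util.Properties;

    // Sketch: direct equivalent of the reflective PropertyConfigurator call above.
    class Log4jConfigSketch
    {
        static void configure(String solrHome, String resource) throws Exception
        {
            Properties p = new Properties();
            try (InputStream is = HandlerOfResources.openResource(solrHome, resource))
            {
                p.load(is);
            }
            // Same effect as Class.forName("org.apache.log4j.PropertyConfigurator")
            //     .getMethod("configure", Properties.class).invoke(null, p);
            org.apache.log4j.PropertyConfigurator.configure(p);
        }
    }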
@@ -164,41 +164,6 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
}
}

private InputStream openResource(CoreContainer coreContainer, String resource)
{
InputStream is = null;
try
{
File f0 = new File(resource);
File f = f0;
if (!f.isAbsolute())
{
// try $CWD/$configDir/$resource
String path = coreContainer.getSolrHome();
path = path.endsWith("/") ? path : path + "/";
f = new File(path + resource);
}
if (f.isFile() && f.canRead())
{
return new FileInputStream(f);
}
else if (f != f0)
{ // no success with $CWD/$configDir/$resource
if (f0.isFile() && f0.canRead()) return new FileInputStream(f0);
}
// delegate to the class loader (looking into $INSTANCE_DIR/lib jars)
is = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource);
}
catch (Exception e)
{
throw new RuntimeException("Error opening " + resource, e);
}
if (is == null) { throw new RuntimeException("Can't find resource '" + resource + "' in classpath or '"
+ coreContainer.getSolrHome() + "', cwd=" + System.getProperty("user.dir")); }
return is;
}


protected void handleCustomAction(SolrQueryRequest req, SolrQueryResponse rsp)
{
log.info("######## Handle Custom Action ###########");
@@ -861,7 +826,7 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
InformationServer srv = informationServers.get(coreName);
if (srv != null)
{
addCoreSummary(coreName, detail, hist, values, srv, report);
addCoreSummary(trackerRegistry, coreName, detail, hist, values, srv, report);

if (reset)
{
@@ -875,191 +840,6 @@ public class AlfrescoCoreAdminHandler extends CoreAdminHandler
}


/**
* @param cname
* @param detail
* @param hist
* @param values
* @param srv
* @param report
* @throws IOException
*/
private void addCoreSummary(String cname, boolean detail, boolean hist, boolean values,
InformationServer srv, NamedList<Object> report) throws IOException
{
NamedList<Object> coreSummary = new SimpleOrderedMap<Object>();
coreSummary.addAll((SimpleOrderedMap<Object>) srv.getCoreStats());

MetadataTracker metaTrkr = trackerRegistry.getTrackerForCore(cname, MetadataTracker.class);
TrackerState metadataTrkrState = metaTrkr.getTrackerState();
long lastIndexTxCommitTime = metadataTrkrState.getLastIndexedTxCommitTime();

long lastIndexedTxId = metadataTrkrState.getLastIndexedTxId();
long lastTxCommitTimeOnServer = metadataTrkrState.getLastTxCommitTimeOnServer();
long lastTxIdOnServer = metadataTrkrState.getLastTxIdOnServer();
Date lastIndexTxCommitDate = new Date(lastIndexTxCommitTime);
Date lastTxOnServerDate = new Date(lastTxCommitTimeOnServer);
long transactionsToDo = lastTxIdOnServer - lastIndexedTxId;
if (transactionsToDo < 0)
{
transactionsToDo = 0;
}

AclTracker aclTrkr = trackerRegistry.getTrackerForCore(cname, AclTracker.class);
TrackerState aclTrkrState = aclTrkr.getTrackerState();
long lastIndexChangeSetCommitTime = aclTrkrState.getLastIndexedChangeSetCommitTime();
long lastIndexedChangeSetId = aclTrkrState.getLastIndexedChangeSetId();
long lastChangeSetCommitTimeOnServer = aclTrkrState.getLastChangeSetCommitTimeOnServer();
long lastChangeSetIdOnServer = aclTrkrState.getLastChangeSetIdOnServer();
Date lastIndexChangeSetCommitDate = new Date(lastIndexChangeSetCommitTime);
Date lastChangeSetOnServerDate = new Date(lastChangeSetCommitTimeOnServer);
long changeSetsToDo = lastChangeSetIdOnServer - lastIndexedChangeSetId;
if (changeSetsToDo < 0)
{
changeSetsToDo = 0;
}

long nodesToDo = 0;
long remainingTxTimeMillis = 0;
if (transactionsToDo > 0)
{
// We now use the elapsed time as seen by the single thread farming out metadata indexing
double meanDocsPerTx = srv.getTrackerStats().getMeanDocsPerTx();
double meanNodeElaspedIndexTime = srv.getTrackerStats().getMeanNodeElapsedIndexTime();
nodesToDo = (long)(transactionsToDo * meanDocsPerTx);
remainingTxTimeMillis = (long) (nodesToDo * meanNodeElaspedIndexTime);
}
Date now = new Date();
Date end = new Date(now.getTime() + remainingTxTimeMillis);
Duration remainingTx = new Duration(now, end);

long remainingChangeSetTimeMillis = 0;
if (changeSetsToDo > 0)
{
// We now use the elapsed time as seen by the single thread farming out alc indexing
double meanAclsPerChangeSet = srv.getTrackerStats().getMeanAclsPerChangeSet();
double meanAclElapsedIndexTime = srv.getTrackerStats().getMeanAclElapsedIndexTime();
remainingChangeSetTimeMillis = (long) (changeSetsToDo * meanAclsPerChangeSet * meanAclElapsedIndexTime);
}
now = new Date();
end = new Date(now.getTime() + remainingChangeSetTimeMillis);
Duration remainingChangeSet = new Duration(now, end);

NamedList<Object> ftsSummary = new SimpleOrderedMap<Object>();
long remainingContentTimeMillis = 0;
srv.addFTSStatusCounts(ftsSummary);
long cleanCount = ((Long)ftsSummary.get("Node count with FTSStatus Clean")).longValue();
long dirtyCount = ((Long)ftsSummary.get("Node count with FTSStatus Dirty")).longValue();
long newCount = ((Long)ftsSummary.get("Node count with FTSStatus New")).longValue();
long nodesInIndex = ((Long)coreSummary.get("Alfresco Nodes in Index"));
long contentYetToSee = nodesInIndex > 0 ? nodesToDo * (cleanCount + dirtyCount + newCount)/nodesInIndex : 0;;
if (dirtyCount + newCount + contentYetToSee > 0)
{
// We now use the elapsed time as seen by the single thread farming out alc indexing
double meanContentElapsedIndexTime = srv.getTrackerStats().getMeanContentElapsedIndexTime();
remainingContentTimeMillis = (long) ((dirtyCount + newCount + contentYetToSee) * meanContentElapsedIndexTime);
}
now = new Date();
end = new Date(now.getTime() + remainingContentTimeMillis);
Duration remainingContent = new Duration(now, end);
coreSummary.add("FTS",ftsSummary);

Duration txLag = new Duration(lastIndexTxCommitDate, lastTxOnServerDate);
if (lastIndexTxCommitDate.compareTo(lastTxOnServerDate) > 0)
{
txLag = new Duration();
}
long txLagSeconds = (lastTxCommitTimeOnServer - lastIndexTxCommitTime) / 1000;
if (txLagSeconds < 0)
{
txLagSeconds = 0;
}

Duration changeSetLag = new Duration(lastIndexChangeSetCommitDate, lastChangeSetOnServerDate);
if (lastIndexChangeSetCommitDate.compareTo(lastChangeSetOnServerDate) > 0)
{
changeSetLag = new Duration();
}
long changeSetLagSeconds = (lastChangeSetCommitTimeOnServer - lastIndexChangeSetCommitTime) / 1000;
if (txLagSeconds < 0)
{
txLagSeconds = 0;
}

ContentTracker contentTrkr = trackerRegistry.getTrackerForCore(cname, ContentTracker.class);
TrackerState contentTrkrState = contentTrkr.getTrackerState();
// Leave ModelTracker out of this check, because it is common
boolean aTrackerIsRunning = aclTrkrState.isRunning() || metadataTrkrState.isRunning()
|| contentTrkrState.isRunning();
coreSummary.add("Active", aTrackerIsRunning);

ModelTracker modelTrkr = trackerRegistry.getModelTracker();
TrackerState modelTrkrState = modelTrkr.getTrackerState();
coreSummary.add("ModelTracker Active", modelTrkrState.isRunning());
coreSummary.add("ContentTracker Active", contentTrkrState.isRunning());
coreSummary.add("MetadataTracker Active", metadataTrkrState.isRunning());
coreSummary.add("AclTracker Active", aclTrkrState.isRunning());

// TX

coreSummary.add("Last Index TX Commit Time", lastIndexTxCommitTime);
coreSummary.add("Last Index TX Commit Date", lastIndexTxCommitDate);
coreSummary.add("TX Lag", txLagSeconds + " s");
coreSummary.add("TX Duration", txLag.toString());
coreSummary.add("Timestamp for last TX on server", lastTxCommitTimeOnServer);
coreSummary.add("Date for last TX on server", lastTxOnServerDate);
coreSummary.add("Id for last TX on server", lastTxIdOnServer);
coreSummary.add("Id for last TX in index", lastIndexedTxId);
coreSummary.add("Approx transactions remaining", transactionsToDo);
coreSummary.add("Approx transaction indexing time remaining", remainingTx.largestComponentformattedString());

// Change set

coreSummary.add("Last Index Change Set Commit Time", lastIndexChangeSetCommitTime);
coreSummary.add("Last Index Change Set Commit Date", lastIndexChangeSetCommitDate);
coreSummary.add("Change Set Lag", changeSetLagSeconds + " s");
coreSummary.add("Change Set Duration", changeSetLag.toString());
coreSummary.add("Timestamp for last Change Set on server", lastChangeSetCommitTimeOnServer);
coreSummary.add("Date for last Change Set on server", lastChangeSetOnServerDate);
coreSummary.add("Id for last Change Set on server", lastChangeSetIdOnServer);
coreSummary.add("Id for last Change Set in index", lastIndexedChangeSetId);
coreSummary.add("Approx change sets remaining", changeSetsToDo);
coreSummary.add("Approx change set indexing time remaining",
remainingChangeSet.largestComponentformattedString());

coreSummary.add("Approx content indexing time remaining",
remainingContent.largestComponentformattedString());

// Stats

coreSummary.add("Model sync times (ms)",
srv.getTrackerStats().getModelTimes().getNamedList(detail, hist, values));
coreSummary.add("Acl index time (ms)",
srv.getTrackerStats().getAclTimes().getNamedList(detail, hist, values));
coreSummary.add("Node index time (ms)",
srv.getTrackerStats().getNodeTimes().getNamedList(detail, hist, values));
coreSummary.add("Docs/Tx", srv.getTrackerStats().getTxDocs().getNamedList(detail, hist, values));
coreSummary.add("Doc Transformation time (ms)", srv.getTrackerStats().getDocTransformationTimes()
.getNamedList(detail, hist, values));

// Model

Map<String, Set<String>> modelErrors = srv.getModelErrors();
if (modelErrors.size() > 0)
{
NamedList<Object> errorList = new SimpleOrderedMap<Object>();
for (Map.Entry<String, Set<String>> modelNameToErrors : modelErrors.entrySet())
{
errorList.add(modelNameToErrors.getKey(), modelNameToErrors.getValue());
}
coreSummary.add("Model changes are not compatible with the existing data model and have not been applied",
errorList);
}

report.add(cname, coreSummary);
}


private void actionINDEX(SolrParams params, String coreName)
{
if (params.get(ARG_TXID) != null)

@@ -1,6 +1,8 @@
package org.alfresco.solr;

import org.apache.commons.io.FileUtils;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.core.CoreContainer;

import java.io.*;

@@ -8,8 +10,7 @@ import java.io.*;
* Created by gethin on 13/09/16.
*/
public class HandlerOfResources {



/**
* Note files can alter due to background processes so file not found is Ok
*
@@ -20,129 +21,52 @@ public class HandlerOfResources {
*/
public static void copyDirectory(File srcDir, File destDir, boolean preserveFileDate) throws IOException
{
if (destDir.exists())
{
throw new IOException("Destination should be created from clean");
}
else
{
if (!destDir.mkdirs()) { throw new IOException("Destination '" + destDir + "' directory cannot be created"); }
if (preserveFileDate)
{
// OL if file not found so does not need to check
destDir.setLastModified(srcDir.lastModified());
}
}
if (!destDir.canWrite()) { throw new IOException("No access to destination directory" + destDir); }

File[] files = srcDir.listFiles();
if (files != null)
{
for (int i = 0; i < files.length; i++)
{
File currentCopyTarget = new File(destDir, files[i].getName());
if (files[i].isDirectory())
{
copyDirectory(files[i], currentCopyTarget, preserveFileDate);
}
else
{
copyFile(files[i], currentCopyTarget, preserveFileDate);
}
}
}
FileUtils.copyDirectory(srcDir,destDir,preserveFileDate);
}

public static void copyFile(File srcFile, File destFile, boolean preserveFileDate) throws IOException
{
try
{
if (destFile.exists()) { throw new IOException("File shoud not exist " + destFile); }

FileInputStream input = new FileInputStream(srcFile);
try
{
FileOutputStream output = new FileOutputStream(destFile);
try
{
copy(input, output);
}
finally
{
try
{
output.close();
}
catch (IOException io)
{

}
}
}
finally
{
try
{
input.close();
}
catch (IOException io)
{

}
}

// check copy
if (srcFile.length() != destFile.length()) { throw new IOException("Failed to copy full from '" + srcFile
+ "' to '" + destFile + "'"); }
if (preserveFileDate)
{
destFile.setLastModified(srcFile.lastModified());
}
}
catch (FileNotFoundException fnfe)
{
fnfe.printStackTrace();
}
}

public static int copy(InputStream input, OutputStream output) throws IOException
{
byte[] buffer = new byte[2048 * 4];
int count = 0;
int n = 0;
while ((n = input.read(buffer)) != -1)
{
output.write(buffer, 0, n);
count += n;
}
return count;
FileUtils.copyFile(srcFile,destFile,preserveFileDate);
}

public void deleteDirectory(File directory) throws IOException
{
if (!directory.exists()) { return; }
if (!directory.isDirectory()) { throw new IllegalArgumentException("Not a directory " + directory); }

File[] files = directory.listFiles();
if (files == null) { throw new IOException("Failed to delete director - no access" + directory); }

for (int i = 0; i < files.length; i++)
{
File file = files[i];

if (file.isDirectory())
{
deleteDirectory(file);
}
else
{
if (!file.delete()) { throw new IOException("Unable to delete file: " + file); }
}
}

if (!directory.delete()) { throw new IOException("Unable to delete directory " + directory); }
FileUtils.deleteDirectory(directory);
}
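The hunk header @@ -20,129 +21,52 @@ shows this region shrinking from 129 to 52 lines, consistent with copyDirectory, copyFile and deleteDirectory dropping their hand-rolled loops in favour of the FileUtils delegations visible above. A minimal sketch of those Apache Commons IO calls follows; the paths and wrapper class are hypothetical.

    package org.alfresco.solr;

    import java.io.File;
    import java.io.IOException;
    import org.apache.commons.io.FileUtils;

    // Sketch of the Commons IO calls the helpers now delegate to.
    class FileUtilsUsageSketch
    {
        static void roundTrip() throws IOException
        {
            File src = new File("/tmp/core-src");        // hypothetical source directory
            File backup = new File("/tmp/core-backup");  // hypothetical destination
            FileUtils.copyDirectory(src, backup, true);  // true preserves file dates
            FileUtils.copyFile(new File(src, "core.properties"),
                               new File(backup, "core.properties"), true);
            FileUtils.deleteDirectory(backup);
        }
    }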

public static InputStream openResource(String solrHome, String resource)
{
InputStream is = null;
try
{
File f0 = new File(resource);
File f = f0;
if (!f.isAbsolute())
{
// try $CWD/$configDir/$resource
String path = solrHome;
path = path.endsWith("/") ? path : path + "/";
f = new File(path + resource);
}
if (f.isFile() && f.canRead())
{
return new FileInputStream(f);
}
else if (f != f0)
{ // no success with $CWD/$configDir/$resource
if (f0.isFile() && f0.canRead()) return new FileInputStream(f0);
}
// delegate to the class loader (looking into $INSTANCE_DIR/lib jars)
is = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource);
}
catch (Exception e)
{
throw new RuntimeException("Error opening " + resource, e);
}
if (is == null) { throw new RuntimeException("Can't find resource '" + resource + "' in classpath or '"
+ solrHome + "', cwd=" + System.getProperty("user.dir")); }
return is;
}
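The relocated openResource above resolves a resource in three steps: an absolute path, then <solrHome>/<resource>, then the context class loader, and it throws a RuntimeException when all three fail. A small usage sketch follows; "solrcore.properties" and the wrapper class are only illustrative.

    package org.alfresco.solr;

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    // Sketch of a caller of the new static helper.
    class OpenResourceUsageSketch
    {
        static Properties load(String solrHome) throws IOException
        {
            Properties props = new Properties();
            try (InputStream is = HandlerOfResources.openResource(solrHome, "solrcore.properties"))
            {
                props.load(is);
            }
            return props;
        }
    }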

public static boolean getSafeBoolean(SolrParams params, String paramName)
{

@@ -1,11 +1,9 @@
package org.alfresco.solr;

import org.alfresco.httpclient.AuthenticationException;
import org.alfresco.service.cmr.repository.datatype.Duration;
import org.alfresco.solr.client.Node;
import org.alfresco.solr.tracker.AclTracker;
import org.alfresco.solr.tracker.IndexHealthReport;
import org.alfresco.solr.tracker.MetadataTracker;
import org.alfresco.solr.tracker.TrackerRegistry;
import org.alfresco.solr.tracker.*;
import org.alfresco.util.CachingDateFormat;
import org.apache.commons.codec.EncoderException;
import org.apache.solr.common.util.NamedList;
@@ -15,6 +13,8 @@ import org.json.JSONException;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
* Created by gethin on 13/09/16.
@@ -22,6 +22,19 @@ import java.util.List;
public class HandlerReportBuilder {


/**
*
* @param trackerRegistry
* @param srv
* @param coreName
* @param tracker
* @param acltxid
* @return
* @throws AuthenticationException
* @throws IOException
* @throws JSONException
* @throws EncoderException
*/
public static NamedList<Object> buildAclTxReport(TrackerRegistry trackerRegistry, InformationServer srv, String coreName, AclTracker tracker, Long acltxid)
throws AuthenticationException, IOException, JSONException, EncoderException
{
@@ -212,4 +225,187 @@ public class HandlerReportBuilder {
}


/**
* @param cname
* @param detail
* @param hist
* @param values
* @param srv
* @param report
* @throws IOException
*/
public static void addCoreSummary(TrackerRegistry trackerRegistry, String cname, boolean detail, boolean hist, boolean values,
InformationServer srv, NamedList<Object> report) throws IOException
{
NamedList<Object> coreSummary = new SimpleOrderedMap<Object>();
coreSummary.addAll((SimpleOrderedMap<Object>) srv.getCoreStats());

MetadataTracker metaTrkr = trackerRegistry.getTrackerForCore(cname, MetadataTracker.class);
TrackerState metadataTrkrState = metaTrkr.getTrackerState();
long lastIndexTxCommitTime = metadataTrkrState.getLastIndexedTxCommitTime();

long lastIndexedTxId = metadataTrkrState.getLastIndexedTxId();
long lastTxCommitTimeOnServer = metadataTrkrState.getLastTxCommitTimeOnServer();
long lastTxIdOnServer = metadataTrkrState.getLastTxIdOnServer();
Date lastIndexTxCommitDate = new Date(lastIndexTxCommitTime);
Date lastTxOnServerDate = new Date(lastTxCommitTimeOnServer);
long transactionsToDo = lastTxIdOnServer - lastIndexedTxId;
if (transactionsToDo < 0)
{
transactionsToDo = 0;
}

AclTracker aclTrkr = trackerRegistry.getTrackerForCore(cname, AclTracker.class);
TrackerState aclTrkrState = aclTrkr.getTrackerState();
long lastIndexChangeSetCommitTime = aclTrkrState.getLastIndexedChangeSetCommitTime();
long lastIndexedChangeSetId = aclTrkrState.getLastIndexedChangeSetId();
long lastChangeSetCommitTimeOnServer = aclTrkrState.getLastChangeSetCommitTimeOnServer();
long lastChangeSetIdOnServer = aclTrkrState.getLastChangeSetIdOnServer();
Date lastIndexChangeSetCommitDate = new Date(lastIndexChangeSetCommitTime);
Date lastChangeSetOnServerDate = new Date(lastChangeSetCommitTimeOnServer);
long changeSetsToDo = lastChangeSetIdOnServer - lastIndexedChangeSetId;
if (changeSetsToDo < 0)
{
changeSetsToDo = 0;
}

long nodesToDo = 0;
long remainingTxTimeMillis = 0;
if (transactionsToDo > 0)
{
// We now use the elapsed time as seen by the single thread farming out metadata indexing
double meanDocsPerTx = srv.getTrackerStats().getMeanDocsPerTx();
double meanNodeElaspedIndexTime = srv.getTrackerStats().getMeanNodeElapsedIndexTime();
nodesToDo = (long)(transactionsToDo * meanDocsPerTx);
remainingTxTimeMillis = (long) (nodesToDo * meanNodeElaspedIndexTime);
}
Date now = new Date();
Date end = new Date(now.getTime() + remainingTxTimeMillis);
Duration remainingTx = new Duration(now, end);

long remainingChangeSetTimeMillis = 0;
if (changeSetsToDo > 0)
{
// We now use the elapsed time as seen by the single thread farming out alc indexing
double meanAclsPerChangeSet = srv.getTrackerStats().getMeanAclsPerChangeSet();
double meanAclElapsedIndexTime = srv.getTrackerStats().getMeanAclElapsedIndexTime();
remainingChangeSetTimeMillis = (long) (changeSetsToDo * meanAclsPerChangeSet * meanAclElapsedIndexTime);
}
now = new Date();
end = new Date(now.getTime() + remainingChangeSetTimeMillis);
Duration remainingChangeSet = new Duration(now, end);

NamedList<Object> ftsSummary = new SimpleOrderedMap<Object>();
long remainingContentTimeMillis = 0;
srv.addFTSStatusCounts(ftsSummary);
long cleanCount = ((Long)ftsSummary.get("Node count with FTSStatus Clean")).longValue();
long dirtyCount = ((Long)ftsSummary.get("Node count with FTSStatus Dirty")).longValue();
long newCount = ((Long)ftsSummary.get("Node count with FTSStatus New")).longValue();
long nodesInIndex = ((Long)coreSummary.get("Alfresco Nodes in Index"));
long contentYetToSee = nodesInIndex > 0 ? nodesToDo * (cleanCount + dirtyCount + newCount)/nodesInIndex : 0;;
if (dirtyCount + newCount + contentYetToSee > 0)
{
// We now use the elapsed time as seen by the single thread farming out alc indexing
double meanContentElapsedIndexTime = srv.getTrackerStats().getMeanContentElapsedIndexTime();
remainingContentTimeMillis = (long) ((dirtyCount + newCount + contentYetToSee) * meanContentElapsedIndexTime);
}
now = new Date();
end = new Date(now.getTime() + remainingContentTimeMillis);
Duration remainingContent = new Duration(now, end);
coreSummary.add("FTS",ftsSummary);

Duration txLag = new Duration(lastIndexTxCommitDate, lastTxOnServerDate);
if (lastIndexTxCommitDate.compareTo(lastTxOnServerDate) > 0)
{
txLag = new Duration();
}
long txLagSeconds = (lastTxCommitTimeOnServer - lastIndexTxCommitTime) / 1000;
if (txLagSeconds < 0)
{
txLagSeconds = 0;
}

Duration changeSetLag = new Duration(lastIndexChangeSetCommitDate, lastChangeSetOnServerDate);
if (lastIndexChangeSetCommitDate.compareTo(lastChangeSetOnServerDate) > 0)
{
changeSetLag = new Duration();
}
long changeSetLagSeconds = (lastChangeSetCommitTimeOnServer - lastIndexChangeSetCommitTime) / 1000;
if (txLagSeconds < 0)
{
txLagSeconds = 0;
}

ContentTracker contentTrkr = trackerRegistry.getTrackerForCore(cname, ContentTracker.class);
TrackerState contentTrkrState = contentTrkr.getTrackerState();
// Leave ModelTracker out of this check, because it is common
boolean aTrackerIsRunning = aclTrkrState.isRunning() || metadataTrkrState.isRunning()
|| contentTrkrState.isRunning();
coreSummary.add("Active", aTrackerIsRunning);

ModelTracker modelTrkr = trackerRegistry.getModelTracker();
TrackerState modelTrkrState = modelTrkr.getTrackerState();
coreSummary.add("ModelTracker Active", modelTrkrState.isRunning());
coreSummary.add("ContentTracker Active", contentTrkrState.isRunning());
coreSummary.add("MetadataTracker Active", metadataTrkrState.isRunning());
coreSummary.add("AclTracker Active", aclTrkrState.isRunning());

// TX

coreSummary.add("Last Index TX Commit Time", lastIndexTxCommitTime);
coreSummary.add("Last Index TX Commit Date", lastIndexTxCommitDate);
coreSummary.add("TX Lag", txLagSeconds + " s");
coreSummary.add("TX Duration", txLag.toString());
coreSummary.add("Timestamp for last TX on server", lastTxCommitTimeOnServer);
coreSummary.add("Date for last TX on server", lastTxOnServerDate);
coreSummary.add("Id for last TX on server", lastTxIdOnServer);
coreSummary.add("Id for last TX in index", lastIndexedTxId);
coreSummary.add("Approx transactions remaining", transactionsToDo);
coreSummary.add("Approx transaction indexing time remaining", remainingTx.largestComponentformattedString());

// Change set

coreSummary.add("Last Index Change Set Commit Time", lastIndexChangeSetCommitTime);
coreSummary.add("Last Index Change Set Commit Date", lastIndexChangeSetCommitDate);
coreSummary.add("Change Set Lag", changeSetLagSeconds + " s");
coreSummary.add("Change Set Duration", changeSetLag.toString());
coreSummary.add("Timestamp for last Change Set on server", lastChangeSetCommitTimeOnServer);
coreSummary.add("Date for last Change Set on server", lastChangeSetOnServerDate);
coreSummary.add("Id for last Change Set on server", lastChangeSetIdOnServer);
coreSummary.add("Id for last Change Set in index", lastIndexedChangeSetId);
coreSummary.add("Approx change sets remaining", changeSetsToDo);
coreSummary.add("Approx change set indexing time remaining",
remainingChangeSet.largestComponentformattedString());

coreSummary.add("Approx content indexing time remaining",
remainingContent.largestComponentformattedString());

// Stats

coreSummary.add("Model sync times (ms)",
srv.getTrackerStats().getModelTimes().getNamedList(detail, hist, values));
coreSummary.add("Acl index time (ms)",
srv.getTrackerStats().getAclTimes().getNamedList(detail, hist, values));
coreSummary.add("Node index time (ms)",
srv.getTrackerStats().getNodeTimes().getNamedList(detail, hist, values));
coreSummary.add("Docs/Tx", srv.getTrackerStats().getTxDocs().getNamedList(detail, hist, values));
coreSummary.add("Doc Transformation time (ms)", srv.getTrackerStats().getDocTransformationTimes()
.getNamedList(detail, hist, values));

// Model

Map<String, Set<String>> modelErrors = srv.getModelErrors();
if (modelErrors.size() > 0)
{
NamedList<Object> errorList = new SimpleOrderedMap<Object>();
for (Map.Entry<String, Set<String>> modelNameToErrors : modelErrors.entrySet())
{
errorList.add(modelNameToErrors.getKey(), modelNameToErrors.getValue());
}
coreSummary.add("Model changes are not compatible with the existing data model and have not been applied",
errorList);
}

report.add(cname, coreSummary);
}
}