Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-07-31 17:39:05 +00:00)
Merged V2.9 to HEAD
9018: MT: system-wide jobs should run across all stores/indexes
9204: Merged V2.2 to V2.9
8633: Merged V2.1 to V2.2
8629: Merged V2.1-A to V2.1
8493: Fixed ADB-51: ImporterBootstrap doesn't use transaction retrying
8494: EHCache and JGroup patches and upgrades
8546: ACT-1650: performance optimization
8550: Fixes to transactional cache handling
8553: Fixed tests: MLText is a Map, but will always have at least one entry, even if that entry is null.
8583: ACT-954: IndexInfo files now reopen when they close (for whatever reason)
8640: Merged V2.1 to V2.2
8638: Used correct exception type for IO channel reopen logic
9102: Unit test to check that transactional cache size overrun is handled
9106: Merged V2.1 to V2.2
9043: Fixed AR-2291: SchemaBootstrap lock is only required before first SQL execution
9045: Fix AR-2291: SchemaBootstrap lock is only required before first SQL execution
9047: Fixed AR-2305: Index tracking in AUTO mode doesn't report anything on bootstrap
9048: Fixed AR-2300: Random-based GUID instead of time-based GUIDs
9049: Fix patches to only run once
9050 <Deferred>: Changed getString() method to use the available buffer length rather than a hard-coded value.
9060: Fixed ETWOONE-109 and ETWOONE-128: RetryingTransactionHelper fixes and improvements
9061: Fixed NodeRefPropertyMethodInterceptorTest
9075 <Deferred>: Added delete permission check when marking a file for delete on close. ETWOONE-141/ACT-2416.
9080: Fixed EHCache source zip
9081: Fixed ETWOONE-118: Tomcat failed bootstrap doesn't clean up EHCache cluster structures
9085: Fixed ETWOONE-154: Added JSR107 Jar to WAR
9115: Fixed test: TransactionalCache uses LRU, so repeatedly checking if an entry is there keeps it in the cache.
9206: Merged V2.2 to V2.9
8857: Improvements to ACL performance for large ACLs
8951: Always check permission entry changes are made at position 0
9219 <No change>: Made NTLMLogonDetails class Serializable, port of r8973.
9220: Added delete permission check when marking a file for delete on close. Port of r9075.
9222: Merged V2.1 to V2.9
8683: Early warning for nodes indexed in the wrong store (ACT-964)
8684: Enhanced tests
8685: Enhanced tests
8686: Additional tests
9223: Merged V2.2 to V2.9
9120: Merged V2.1 to V2.2
8740: Fix for AR-2173 - do not recheck the case of the user name when validating tickets (it has already been done)
9122: Additional unit test from support case.
9224: Merged V2.2 to V2.9
9076: Fixed ETWOTWO-426: Upgrading Alfresco from 2.1.1 to 2.2 throws errors with MySQL 5.0.51
9104: Merged V2.1 to V2.2
9025: Fixed AR-2314, AR-2299: Optimizations after profiling
9105: Merged V2.1 to V2.2
8745: Fix AR-2233 (regression introduced by fix for AR-2221)
9121: Merged V2.1 to V2.2
9017: Fix index backup failing due to background index merge/deletions (includes back port of CHK-2588)
9137: Incorporated additions from Will into AVM console (ETWOTWO-439)
9225: Merged V2.1 to V2.9
8641: Merged V2.1-A to V2.1
7729: Fix to Repository Web Service (queryAssociated) to allow reverse association lookup (i.e. given the target, get the source)
8673: Fix for AR-2098 - shorter URL form now has NTLM filter mapping example in web.xml
8682: Fix for AR-2005
8695: AR-2054.
8696: Improved sort test to include prefix form of field name
9226: Fix ALFCOM-994 (see also earlier change in r9223)

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@9233 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -183,6 +183,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
try
{
NodeRef childRef = relationshipRef.getChildRef();
if(!childRef.getStoreRef().equals(store))
{
throw new LuceneIndexException("Create node failed - node is not in the required store");
}
// If we have the root node we delete all other root nodes first
if ((relationshipRef.getParentRef() == null)
&& tenantService.getBaseName(childRef).equals(nodeService.getRootNode(childRef.getStoreRef())))
@@ -249,6 +253,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
checkAbleToDoWork(IndexUpdateStatus.SYNCRONOUS);
try
{
if(!nodeRef.getStoreRef().equals(store))
{
throw new LuceneIndexException("Update node failed - node is not in the required store");
}
reindex(nodeRef, false);
}
catch (LuceneIndexException e)
@@ -267,6 +275,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
checkAbleToDoWork(IndexUpdateStatus.SYNCRONOUS);
try
{
if(!relationshipRef.getChildRef().getStoreRef().equals(store))
{
throw new LuceneIndexException("Delete node failed - node is not in the required store");
}
// The requires a reindex - a delete may remove too much from under this node - that also lives under
// other nodes via secondary associations. All the nodes below require reindex.
// This is true if the deleted node is via secondary or primary assoc.
@@ -290,6 +302,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
{
// TODO: Optimise
// reindex(relationshipRef.getParentRef());
if(!relationshipRef.getChildRef().getStoreRef().equals(store))
{
throw new LuceneIndexException("Create child relationship failed - node is not in the required store");
}
reindex(relationshipRef.getChildRef(), true);
}
catch (LuceneIndexException e)
@@ -309,6 +325,14 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
try
{
// TODO: Optimise
if(!relationshipBeforeRef.getChildRef().getStoreRef().equals(store))
{
throw new LuceneIndexException("Update child relationship failed - node is not in the required store");
}
if(!relationshipAfterRef.getChildRef().getStoreRef().equals(store))
{
throw new LuceneIndexException("Update child relationship failed - node is not in the required store");
}
if (relationshipBeforeRef.getParentRef() != null)
{
// reindex(relationshipBeforeRef.getParentRef());
@@ -331,6 +355,10 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
checkAbleToDoWork(IndexUpdateStatus.SYNCRONOUS);
try
{
if(!relationshipRef.getChildRef().getStoreRef().equals(store))
{
throw new LuceneIndexException("Delete child relationship failed - node is not in the required store");
}
// TODO: Optimise
if (relationshipRef.getParentRef() != null)
{
@@ -547,8 +575,8 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
paths.addAll(categoryPaths);

Document xdoc = new Document();
xdoc.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
boolean isAtomic = true;
for (QName propertyName : properties.keySet())
{
@@ -604,9 +632,9 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
qNameBuffer.append(";/");
}
qNameBuffer.append(ISO9075.getXPathName(qNameRef.getQName()));
xdoc.add(new Field("PARENT", tenantService.getName(qNameRef.getParentRef()).toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("PARENT", tenantService.getName(qNameRef.getParentRef()).toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
xdoc.add(new Field("ASSOCTYPEQNAME", ISO9075.getXPathName(qNameRef.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? "" : ISO9075.getXPathName(pair.getSecond()), Field.Store.YES, Field.Index.UN_TOKENIZED,
xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? "" : ISO9075.getXPathName(pair.getSecond()), Field.Store.YES, Field.Index.NO_NORMS,
Field.TermVector.NO));
}
}
@@ -625,17 +653,17 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
if (directPaths.contains(pair.getFirst()))
{
Document directoryEntry = new Document();
directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
directoryEntry.add(new Field("PATH", pathString, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
for (NodeRef parent : getParents(pair.getFirst()))
{
directoryEntry.add(new Field("ANCESTOR", tenantService.getName(parent).toString(), Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
directoryEntry.add(new Field("ANCESTOR", tenantService.getName(parent).toString(), Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
}
directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));

if (isCategory(getDictionaryService().getType(nodeService.getType(nodeRef))))
{
directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
}

docs.add(directoryEntry);
@@ -649,12 +677,12 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
if (isRoot)
{
// TODO: Does the root element have a QName?
xdoc.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
xdoc.add(new Field("PATH", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("QNAME", "", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISROOT", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISROOT", "T", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(ContentModel.ASSOC_CHILDREN), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
docs.add(xdoc);

}
@@ -666,31 +694,31 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
// true));

ChildAssociationRef primary = nodeService.getPrimaryParent(nodeRef);
xdoc.add(new Field("PRIMARYPARENT", tenantService.getName(primary.getParentRef()).toString(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("PRIMARYPARENT", tenantService.getName(primary.getParentRef()).toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(primary.getTypeQName()), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
QName typeQName = nodeService.getType(nodeRef);

xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
for (QName classRef : nodeService.getAspects(nodeRef))
{
xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
}

xdoc.add(new Field("ISROOT", "F", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("ISROOT", "F", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
xdoc.add(new Field("ISNODE", "T", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
if (isAtomic || indexAllProperties)
{
xdoc.add(new Field("FTSSTATUS", "Clean", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("FTSSTATUS", "Clean", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
}
else
{
if (isNew)
{
xdoc.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
}
else
{
xdoc.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.UN_TOKENIZED, Field.TermVector.NO));
xdoc.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
}
}
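The ADMLuceneIndexerImpl hunks above make two kinds of change: every indexing entry point now rejects a NodeRef whose store does not match the store this indexer serves, and the keyword-style fields (ID, TX, PARENT, TYPE, ASPECT, the IS* flags and FTSSTATUS) move from Field.Index.UN_TOKENIZED to Field.Index.NO_NORMS. A minimal Lucene 2.x sketch of the field change follows; the helper class and method are illustrative only and not part of the commit.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

public class NoNormsSketch
{
    // Illustrative only: builds a document the way the indexer does after this
    // change. NO_NORMS still indexes the value as a single untokenized term,
    // but omits the per-field length norms, so these purely keyword-like
    // fields take less space in the index while matching exactly as before.
    public static Document buildDocument(String nodeRef, String txnId)
    {
        Document doc = new Document();
        doc.add(new Field("ID", nodeRef, Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
        doc.add(new Field("TX", txnId, Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
        return doc;
    }
}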
@@ -58,6 +58,7 @@ import org.alfresco.repo.search.QueryParameterDefImpl;
import org.alfresco.repo.search.QueryRegisterComponent;
import org.alfresco.repo.search.impl.lucene.analysis.DateTimeAnalyser;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
import org.alfresco.repo.search.results.ChildAssocRefResultSet;
import org.alfresco.repo.search.results.DetachedResultSet;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
@@ -461,6 +462,49 @@ public class ADMLuceneTest extends TestCase
super(arg0);
}

public void testOverWritetoZeroSize() throws Exception
{
testTX.commit();
testTX = transactionService.getUserTransaction();
testTX.begin();
luceneFTS.pause();
buildBaseIndex();
runBaseTests();
luceneFTS.resume();
testTX.commit();

for (int i = 0; i < 100; i++)
{
testTX = transactionService.getUserTransaction();
testTX.begin();
runBaseTests();
nodeService.setProperty(rootNodeRef, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n1, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n2, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n3, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n4, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n5, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n6, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n7, QName.createQName("{namespace}property-A"), "A");
runBaseTests();
testTX.commit();

testTX = transactionService.getUserTransaction();
testTX.begin();
runBaseTests();
nodeService.setProperty(n8, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n9, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n10, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n11, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n12, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n13, QName.createQName("{namespace}property-A"), "A");
nodeService.setProperty(n14, QName.createQName("{namespace}property-A"), "A");
runBaseTests();
testTX.commit();
}

}

/**
* Test bug fix
*
@@ -1538,7 +1582,7 @@ public class ADMLuceneTest extends TestCase
sp7.addStore(rootNodeRef.getStoreRef());
sp7.setLanguage(SearchService.LANGUAGE_LUCENE);
sp7.setQuery("PATH:\"//.\"");
sp7.addSort("@" + createdDate, true);
sp7.addSort("@" + createdDate.getPrefixedQName(namespacePrefixResolver), true);
results = searcher.query(sp7);

Date date = null;
@@ -1573,7 +1617,7 @@ public class ADMLuceneTest extends TestCase
date = currentBun;
}
results.close();

SearchParameters sp_7 = new SearchParameters();
sp_7.addStore(rootNodeRef.getStoreRef());
sp_7.setLanguage(SearchService.LANGUAGE_LUCENE);
@@ -1613,7 +1657,6 @@ public class ADMLuceneTest extends TestCase
date = currentBun;
}
results.close();

// sort by double

@@ -2571,8 +2614,7 @@ public class ADMLuceneTest extends TestCase
{
String startDate = df.format(new Date(testDate.getTime() - i));
// System.out.println("\tStart = " + startDate);

String endDate = df.format(new Date(testDate.getTime() + i));
// System.out.println("\tEnd = " + endDate);
@@ -1071,7 +1071,7 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
}

/**
* Makes a backup of the source directory via a temporary folder
* Makes a backup of the source directory via a temporary folder.
*/
private void backupDirectory(File sourceDir, File tempDir, File targetDir) throws Exception
{
@@ -1111,6 +1111,14 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
}
}

/**
* Note files can alter due to background processes so file not found is Ok
*
* @param srcDir
* @param destDir
* @param preserveFileDate
* @throws IOException
*/
private void copyDirectory(File srcDir, File destDir, boolean preserveFileDate) throws IOException
{
if (destDir.exists())
@@ -1125,6 +1133,7 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
}
if (preserveFileDate)
{
// OL if file not found so does not need to check
destDir.setLastModified(srcDir.lastModified());
}
}
@@ -1134,83 +1143,100 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
}

File[] files = srcDir.listFiles();
if (files == null)
if (files != null)
{
throw new IOException(" No Access to " + srcDir);
}
for (int i = 0; i < files.length; i++)
{
File currentCopyTarget = new File(destDir, files[i].getName());
if (files[i].isDirectory())
for (int i = 0; i < files.length; i++)
{
// Skip any temp index file
if (files[i].getName().equals(tempDir.getName()))
File currentCopyTarget = new File(destDir, files[i].getName());
if (files[i].isDirectory())
{
// skip any temp back up directories
}
else if (files[i].getName().equals(targetDir.getName()))
{
// skip any back up directories
// Skip any temp index file
if (files[i].getName().equals(tempDir.getName()))
{
// skip any temp back up directories
}
else if (files[i].getName().equals(targetDir.getName()))
{
// skip any back up directories
}
else
{
copyDirectory(files[i], currentCopyTarget, preserveFileDate);
}
}
else
{
copyDirectory(files[i], currentCopyTarget, preserveFileDate);
copyFile(files[i], currentCopyTarget, preserveFileDate);
}
}
else
}
else
{
if (logger.isDebugEnabled())
{
copyFile(files[i], currentCopyTarget, preserveFileDate);
logger.debug("Skipping transient directory " + srcDir);
}
}
}

private void copyFile(File srcFile, File destFile, boolean preserveFileDate) throws IOException
{
if (destFile.exists())
{
throw new IOException("File shoud not exist " + destFile);
}

FileInputStream input = new FileInputStream(srcFile);
try
{
FileOutputStream output = new FileOutputStream(destFile);
if (destFile.exists())
{
throw new IOException("File shoud not exist " + destFile);
}

FileInputStream input = new FileInputStream(srcFile);
try
{
copy(input, output);
FileOutputStream output = new FileOutputStream(destFile);
try
{
copy(input, output);
}
finally
{
try
{
output.close();
}
catch (IOException io)
{

}
}
}
finally
{
try
{
output.close();
input.close();
}
catch (IOException io)
{

}
}
}
finally
{
try
{
input.close();
}
catch (IOException io)
{

// check copy
if (srcFile.length() != destFile.length())
{
throw new IOException("Failed to copy full from '" + srcFile + "' to '" + destFile + "'");
}
if (preserveFileDate)
{
destFile.setLastModified(srcFile.lastModified());
}
}

// check copy
if (srcFile.length() != destFile.length())
catch (FileNotFoundException fnfe)
{
throw new IOException("Failed to copy full from '" + srcFile + "' to '" + destFile + "'");
}
if (preserveFileDate)
{
destFile.setLastModified(srcFile.lastModified());
// ignore as files can go
if (logger.isDebugEnabled())
{
logger.debug("Skipping transient file " + srcFile);
}
}
}
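The restructured copy logic above tolerates files vanishing mid-backup: the new javadoc notes that "files can alter due to background processes", and revision 9017 in the commit message ties this to index merges deleting files while a backup runs. A condensed sketch of that pattern follows, assuming a plain stream copy in place of the factory's own copy helper; the class and method names are illustrative only.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;

public class TolerantCopySketch
{
    // Sketch only: copy a single file, treating FileNotFoundException as a
    // benign race with background index maintenance rather than an error.
    public static void copyFile(File srcFile, File destFile) throws IOException
    {
        try
        {
            FileInputStream input = new FileInputStream(srcFile);
            try
            {
                FileOutputStream output = new FileOutputStream(destFile);
                try
                {
                    byte[] buffer = new byte[8192];
                    int read;
                    while ((read = input.read(buffer)) != -1)
                    {
                        output.write(buffer, 0, read);
                    }
                }
                finally
                {
                    output.close();
                }
            }
            finally
            {
                input.close();
            }
            if (srcFile.length() != destFile.length())
            {
                throw new IOException("Failed to copy fully from " + srcFile + " to " + destFile);
            }
        }
        catch (FileNotFoundException fnfe)
        {
            // The source disappeared (e.g. removed by an index merge); skip it.
        }
    }
}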
@@ -1247,7 +1273,7 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
for (int i = 0; i < files.length; i++)
{
File file = files[i];

if (file.isDirectory())
{
deleteDirectory(file);
@@ -1392,6 +1418,11 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
{
return indexer.doWithWriteLock(lockWork);
}

public boolean canRetry()
{
return false;
}
}

private static class CoreLockWork<R> implements IndexInfo.LockWork<R>
@@ -1430,21 +1461,31 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
}
}
}

public boolean canRetry()
{
return false;
}
});
}

public boolean canRetry()
{
return false;
}
}

public static void main(String[] args) throws IOException
{
// delete a directory ....
if(args.length != 1)
if (args.length != 1)
{
return;
}
File file = new File(args[0]);
deleteDirectory(file);
}

public static void deleteDirectory(File directory) throws IOException
{
if (!directory.exists())
@@ -1465,9 +1506,9 @@ public abstract class AbstractLuceneIndexerAndSearcherFactory implements LuceneI
for (int i = 0; i < files.length; i++)
{
File file = files[i];

System.out.println(".");
//System.out.println("Deleting "+file.getCanonicalPath());
// System.out.println("Deleting "+file.getCanonicalPath());
if (file.isDirectory())
{
deleteDirectory(file);
File diff suppressed because it is too large
@@ -36,5 +36,7 @@ public interface ReferenceCounting

public void setInvalidForReuse() throws IOException;

public boolean isInvalidForReuse();

public String getId();
}
@@ -202,5 +202,10 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
{
return id;
}

public boolean isInvalidForReuse()
{
return invalidForReuse;
}
}
}
@@ -46,7 +46,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous == null;
return (previous == null) || (previous == ACTIVE);
}

public int getStatus()
@@ -77,7 +77,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous.allowsRollbackOrMark(previous);
return previous.allowsRollbackOrMark(previous) || (previous == MARKED_ROLLBACK);
}

public int getStatus()
@@ -108,7 +108,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous == TransactionStatus.PREPARING;
return (previous == TransactionStatus.PREPARING) || (previous == PREPARED);
}

public int getStatus()
@@ -139,7 +139,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous == TransactionStatus.COMMITTING;
return (previous == TransactionStatus.COMMITTING) || (previous == COMMITTED);
}

public int getStatus()
@@ -170,7 +170,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous == TransactionStatus.ROLLINGBACK;
return (previous == TransactionStatus.ROLLINGBACK) || (previous == ROLLEDBACK);
}

public int getStatus()
@@ -201,7 +201,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return false;
return (previous == UNKNOWN);
}

public int getStatus()
@@ -232,7 +232,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return false;
return (previous == NO_TRANSACTION);
}

public int getStatus()
@@ -263,7 +263,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous == TransactionStatus.ACTIVE;
return (previous == TransactionStatus.ACTIVE) || (previous == PREPARING);
}

public int getStatus()
@@ -294,7 +294,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous == TransactionStatus.PREPARED;
return (previous == TransactionStatus.PREPARED) || (previous == COMMITTING);
}

public int getStatus()
@@ -325,7 +325,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return previous.allowsRollbackOrMark(previous);
return previous.allowsRollbackOrMark(previous) || (previous == ROLLINGBACK);
}

public int getStatus()
@@ -356,7 +356,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return false;
return (previous == MERGE);
}

public int getStatus()
@@ -387,7 +387,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return false;
return (previous == MERGE_TARGET);
}

public int getStatus()
@@ -419,7 +419,7 @@ public enum TransactionStatus

public boolean follows(TransactionStatus previous)
{
return false;
return (previous == COMMITTED_DELETING);
}

public int getStatus()
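The follows() changes in the TransactionStatus hunks appear to apply one relaxation throughout: besides its original predecessor, each status now also accepts what looks like its own value, so re-checking a transition that has already taken place is treated as valid rather than rejected. A reduced sketch of that pattern, using only three of the statuses and an illustrative enum name:

public enum TransactionStatusSketch
{
    // Each constant's follows() also accepts the constant itself, so
    // applying the same transition check twice succeeds the second time.
    ACTIVE
    {
        public boolean follows(TransactionStatusSketch previous)
        {
            return (previous == null) || (previous == ACTIVE);
        }
    },
    PREPARING
    {
        public boolean follows(TransactionStatusSketch previous)
        {
            return (previous == ACTIVE) || (previous == PREPARING);
        }
    },
    PREPARED
    {
        public boolean follows(TransactionStatusSketch previous)
        {
            return (previous == PREPARING) || (previous == PREPARED);
        }
    };

    public abstract boolean follows(TransactionStatusSketch previous);
}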