Merged V4.0-BUG-FIX to HEAD

36915: ALF-12874: Schema reference files are out of date
   - Difference: expected primary key .alf_tenant.PRIMARY.columnNames[0]="id", but was .alf_tenant.PRIMARY.columnNames[0]="tenant_domain"
   - fixed the rename of alf_tenant PK "id" -> "tenant_domain" (for all 5 DB types)
   36950: Merged V4.0 to V4.0-BUG-FIX (RECORD ONLY)
      36917: Merged V4.0-BUG-FIX to V4.0
         36915: ALF-12874: Schema reference files are out of date
         - Difference: expected primary key .alf_tenant.PRIMARY.columnNames[0]="id", but was .alf_tenant.PRIMARY.columnNames[0]="tenant_domain"
         - fixed the rename of alf_tenant PK "id" -> "tenant_domain" (for all 5 DB types)
   36951: Merged V4.0 (4.0.2) to V4.0-BUG-FIX (4.0.3)
      36949: ALF-13745: Merged V3.4-BUG-FIX (3.4.10) to V4.0 (4.0.2)
         36948: ALF-13667 Additional OpenOffice mimetypes to be added to the mime-type maps
            - On reflection, the maxSourceSizeKBytes limits for PowerPoint files were too small. They did not take into
              account that the files may contain images rather than just text.
      36923: Merged DEV to V4.0
         36600: ALF-14129 : Failed to do upgrade from 3.4.8 to 4.0.2
            The statements in the ActivitiTaskIdIndexes.sql script were marked as optional.
      36922: Merged DEV to V4.0
         36729: ALF-14129 : Failed to do upgrade from 3.4.8 to 4.0.2
            The outdated Schema-Reference-ACT.xml files were updated for all dialects, and a regression introduced after the ALF-12874 fix was corrected.
   36953: Merged BRANCHES/DEV/V3.4-BUG-FIX to BRANCHES/DEV/V4.0-BUG-FIX
      36905: ALF-14178 Share - Path issue with number of character limitation. Updated qname to max DB limit of 255 chars.
   36954: ALF-14209 SOLR - does not support query for all stores
   - it is now possible for SOLR to track any store and Alfresco to execute queries against that store (no federation or sharding yet ....)
   36965: Extra debugging after review of ALF-14238
   37032: ALF-12723: Missing mergeinfo for r34655
   37033: Merged V4.0 to V4.0-BUG-FIX
      36999: ALF-5285: Reverse merging r26226, as it causes regressions ALF-14202, ALF-14242 and ALF-14245
      37001: ALF-14169: Alfresco startup fails if XAM module was deployed
         Jan approved fix
      37005: ALF-14169: Fix unit test compilation
      37020: Resolved some "Patch description is not available" warnings in 4.0.2
      37022: ALF-12874: Schema reference files are out of date
      - Fixed up PostgreSQL diffs
      37027: ALF-12874: Schema reference files are out of date
      - DB2 fixes by Dmitry


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@37036 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Dave Ward
2012-05-25 17:19:13 +00:00
parent 18ad785d32
commit ff2ae89c08
22 changed files with 366 additions and 130 deletions

View File

@@ -1058,6 +1058,13 @@
<property name="autoStart"> <property name="autoStart">
<value>false</value> <value>false</value>
</property> </property>
<property name="compositePropertyTypes">
<map>
<entry key="solr.store.mappings">
<value>org.alfresco.repo.search.impl.solr.SolrStoreMapping</value>
</entry>
</map>
</property>
<property name="category"> <property name="category">
<value>Search</value> <value>Search</value>
</property> </property>

View File

@@ -2455,7 +2455,7 @@
</columns> </columns>
<primarykey name="PRIMARY"> <primarykey name="PRIMARY">
<columnnames> <columnnames>
<columnname order="1">id</columnname> <columnname order="1">tenant_domain</columnname>
</columnnames> </columnnames>
</primarykey> </primarykey>
<foreignkeys/> <foreignkeys/>

View File

@@ -377,6 +377,11 @@
<columnname>proc_inst_id_</columnname> <columnname>proc_inst_id_</columnname>
</columnnames> </columnnames>
</index> </index>
<index name="act_idx_hi_detail_task_id" unique="false">
<columnnames>
<columnname>task_id_</columnname>
</columnnames>
</index>
<index name="act_idx_hi_detail_time" unique="false"> <index name="act_idx_hi_detail_time" unique="false">
<columnnames> <columnnames>
<columnname>time_</columnname> <columnname>time_</columnname>
@@ -449,7 +454,7 @@
</primarykey> </primarykey>
<foreignkeys/> <foreignkeys/>
<indexes> <indexes>
<index name="act_hi_procinst_proc_def_id__key" unique="true"> <index name="act_hi_procinst_proc_def_id__business_key__key" unique="true">
<columnnames> <columnnames>
<columnname>proc_def_id_</columnname> <columnname>proc_def_id_</columnname>
<columnname>business_key_</columnname> <columnname>business_key_</columnname>
@@ -898,7 +903,7 @@
</foreignkey> </foreignkey>
</foreignkeys> </foreignkeys>
<indexes> <indexes>
<index name="act_ru_execution_proc_def_id__key" unique="true"> <index name="act_ru_execution_proc_def_id__business_key__key" unique="true">
<columnnames> <columnnames>
<columnname>proc_def_id_</columnname> <columnname>proc_def_id_</columnname>
<columnname>business_key_</columnname> <columnname>business_key_</columnname>
@@ -1311,6 +1316,11 @@
<columnname>proc_inst_id_</columnname> <columnname>proc_inst_id_</columnname>
</columnnames> </columnnames>
</index> </index>
<index name="act_idx_variable_task_id" unique="false">
<columnnames>
<columnname>task_id_</columnname>
</columnnames>
</index>
</indexes> </indexes>
</table> </table>
</objects> </objects>

View File

@@ -2470,7 +2470,7 @@
<autoincrement>false</autoincrement> <autoincrement>false</autoincrement>
</column> </column>
<column name="enabled" order="3"> <column name="enabled" order="3">
<type>bit</type> <type>bool</type>
<nullable>false</nullable> <nullable>false</nullable>
<autoincrement>false</autoincrement> <autoincrement>false</autoincrement>
</column> </column>
@@ -2490,9 +2490,9 @@
<autoincrement>false</autoincrement> <autoincrement>false</autoincrement>
</column> </column>
</columns> </columns>
<primarykey name="PRIMARY"> <primarykey name="alf_tenant_pkey">
<columnnames> <columnnames>
<columnname order="1">id</columnname> <columnname order="1">tenant_domain</columnname>
</columnnames> </columnnames>
</primarykey> </primarykey>
<foreignkeys/> <foreignkeys/>

View File

@@ -719,7 +719,7 @@
</foreignkey> </foreignkey>
</foreignkeys> </foreignkeys>
<indexes> <indexes>
<index name="avm_version_roots_version_id_key" unique="true"> <index name="avm_version_roots_version_id_avm_store_id_key" unique="true">
<columnnames> <columnnames>
<columnname>version_id</columnname> <columnname>version_id</columnname>
<columnname>avm_store_id</columnname> <columnname>avm_store_id</columnname>

View File

@@ -8,10 +8,10 @@
-- --
-- Add index to runtime variable table -- Add index to runtime variable table
create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_); create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_); --(optional)
-- Add index to history variable table -- Add index to history variable table
create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_); create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_); --(optional)
-- --
-- Record script finish -- Record script finish

View File

@@ -1,26 +0,0 @@
--
-- Title: Add missing Activiti indexes on task-id for runtime and history variables.
-- Database: MySQL
-- Since: V4.0 Schema 5029
-- Author: Frederik Heremans
--
-- Please contact support@alfresco.com if you need assistance with the upgrade.
--
-- Add index to runtime variable table
create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_);
-- Add index to history variable table
create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_);
--
-- Record script finish
--
DELETE FROM alf_applied_patch WHERE id = 'patch.db-V4.0-Activiti-task-id-indexes';
INSERT INTO alf_applied_patch
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
VALUES
(
'patch.db-V4.0-Activiti-task-id-indexes', 'Manually executed script upgrade V4.0: Add missing Activiti indexes on task-id',
0, 6003, -1, 6004, null, 'UNKNOWN', ${TRUE}, ${TRUE}, 'Script completed'
);

View File

@@ -1,26 +0,0 @@
--
-- Title: Add missing Activiti indexes on task-id for runtime and history variables.
-- Database: PostgreSQL
-- Since: V4.0 Schema 5029
-- Author: Frederik Heremans
--
-- Please contact support@alfresco.com if you need assistance with the upgrade.
--
-- Add index to runtime variable table
create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_); --(optional)
-- Add index to history variable table
create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_); --(optional)
--
-- Record script finish
--
DELETE FROM alf_applied_patch WHERE id = 'patch.db-V4.0-Activiti-task-id-indexes';
INSERT INTO alf_applied_patch
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
VALUES
(
'patch.db-V4.0-Activiti-task-id-indexes', 'Manually executed script upgrade V4.0: Add missing Activiti indexes on task-id',
0, 6003, -1, 6004, null, 'UNKNOWN', ${TRUE}, ${TRUE}, 'Script completed'
);

View File

@@ -161,6 +161,8 @@ patch.wcmFolders.webprojects.result.created=The Web Projects folder was successf
patch.wcmFolders.webforms.result.exists=The Web Forms folder already exists: {0} patch.wcmFolders.webforms.result.exists=The Web Forms folder already exists: {0}
patch.wcmFolders.webforms.result.created=The Web Forms folder was successfully created: {0} patch.wcmFolders.webforms.result.created=The Web Forms folder was successfully created: {0}
patch.wcmDeployed.description=Adds the 'WCM Deployed' space to the company home folder.
patch.linkNodeExtension.description=Fixes link node file extensions to have a .url extension. patch.linkNodeExtension.description=Fixes link node file extensions to have a .url extension.
patch.linkNodeExtension.result=Fixed {0} link node file extensions. See file {1} for details. patch.linkNodeExtension.result=Fixed {0} link node file extensions. See file {1} for details.
patch.linkNodeExtension.err.unable_to_fix=Auto-fixing of link node file extensions failed. See file {0} for details. patch.linkNodeExtension.err.unable_to_fix=Auto-fixing of link node file extensions failed. See file {0} for details.

View File

@@ -2134,7 +2134,7 @@
</bean> </bean>
<bean id="patch.migrateAttrDelete" class="org.alfresco.repo.admin.patch.impl.NoOpPatch" parent="basePatch"> <bean id="patch.migrateAttrDelete" class="org.alfresco.repo.admin.patch.impl.NoOpPatch" parent="basePatch">
<property name="id"><value>patch.migrateAttrDelete</value></property> <property name="id"><value>patch.migrateAttrDelete</value></property>
<property name="description"><value>patch.migrateAttrDelete.description</value></property> <property name="description"><value>patch.noOpPatch.description</value></property>
<property name="fixesFromSchema"><value>0</value></property> <property name="fixesFromSchema"><value>0</value></property>
<property name="fixesToSchema"><value>4106</value></property> <property name="fixesToSchema"><value>4106</value></property>
<property name="targetSchema"><value>4107</value></property> <property name="targetSchema"><value>4107</value></property>

View File

@@ -637,16 +637,16 @@ content.transformer.OpenOffice.mimeTypeLimits.docx.pdf.maxSourceSizeKBytes=768
content.transformer.OpenOffice.mimeTypeLimits.docm.pdf.maxSourceSizeKBytes=768 content.transformer.OpenOffice.mimeTypeLimits.docm.pdf.maxSourceSizeKBytes=768
content.transformer.OpenOffice.mimeTypeLimits.dotx.pdf.maxSourceSizeKBytes=768 content.transformer.OpenOffice.mimeTypeLimits.dotx.pdf.maxSourceSizeKBytes=768
content.transformer.OpenOffice.mimeTypeLimits.dotm.pdf.maxSourceSizeKBytes=768 content.transformer.OpenOffice.mimeTypeLimits.dotm.pdf.maxSourceSizeKBytes=768
content.transformer.OpenOffice.mimeTypeLimits.ppt.pdf.maxSourceSizeKBytes=2048 content.transformer.OpenOffice.mimeTypeLimits.ppt.pdf.maxSourceSizeKBytes=6144
content.transformer.OpenOffice.mimeTypeLimits.pptx.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.pptx.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.pptm.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.pptm.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.ppsx.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.ppsx.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.ppsm.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.ppsm.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.potx.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.potx.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.potm.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.potm.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.ppam.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.ppam.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.sldx.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.sldx.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.sldm.pdf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.mimeTypeLimits.sldm.pdf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.mimeTypeLimits.xls.pdf.maxSourceSizeKBytes=10240 content.transformer.OpenOffice.mimeTypeLimits.xls.pdf.maxSourceSizeKBytes=10240
content.transformer.OpenOffice.mimeTypeLimits.xlsx.pdf.maxSourceSizeKBytes=1536 content.transformer.OpenOffice.mimeTypeLimits.xlsx.pdf.maxSourceSizeKBytes=1536
content.transformer.OpenOffice.mimeTypeLimits.xltx.pdf.maxSourceSizeKBytes=1536 content.transformer.OpenOffice.mimeTypeLimits.xltx.pdf.maxSourceSizeKBytes=1536
@@ -662,16 +662,16 @@ content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.docx.swf.maxSourceSizeKByt
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.docm.swf.maxSourceSizeKBytes=256 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.docm.swf.maxSourceSizeKBytes=256
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.dotx.swf.maxSourceSizeKBytes=256 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.dotx.swf.maxSourceSizeKBytes=256
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.dotm.swf.maxSourceSizeKBytes=256 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.dotm.swf.maxSourceSizeKBytes=256
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppt.swf.maxSourceSizeKBytes=768 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppt.swf.maxSourceSizeKBytes=6144
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.pptx.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.pptx.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.pptm.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.pptm.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppsx.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppsx.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppsm.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppsm.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.potx.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.potx.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.potm.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.potm.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppam.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.ppam.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.sldx.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.sldx.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.sldm.swf.maxSourceSizeKBytes=512 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.sldm.swf.maxSourceSizeKBytes=4096
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.xls.swf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.xls.swf.maxSourceSizeKBytes=1024
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.xlsx.swf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.xlsx.swf.maxSourceSizeKBytes=1024
content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.xltx.swf.maxSourceSizeKBytes=1024 content.transformer.OpenOffice.Pdf2swf.mimeTypeLimits.xltx.swf.maxSourceSizeKBytes=1024
@@ -918,6 +918,18 @@ solr.solrConnectTimeout=5000
# cron expression defining how often the Solr Admin client (used by JMX) pings Solr if it goes away # cron expression defining how often the Solr Admin client (used by JMX) pings Solr if it goes away
solr.solrPingCronExpression=0 0/5 * * * ? * solr.solrPingCronExpression=0 0/5 * * * ? *
#Default SOLR store mappings mappings
solr.store.mappings=solrMappingAlfresco,solrMappingArchive
solr.store.mappings.value.solrMappingAlfresco.httpClientFactory=solrHttpClientFactory
solr.store.mappings.value.solrMappingAlfresco.baseUrl=/solr/alfresco
solr.store.mappings.value.solrMappingAlfresco.protocol=workspace
solr.store.mappings.value.solrMappingAlfresco.identifier=SpacesStore
solr.store.mappings.value.solrMappingArchive.httpClientFactory=solrHttpClientFactory
solr.store.mappings.value.solrMappingArchive.baseUrl=/solr/archive
solr.store.mappings.value.solrMappingArchive.protocol=archive
solr.store.mappings.value.solrMappingArchive.identifier=SpacesStore
# #
# Web Publishing Properties # Web Publishing Properties
# #

View File

@@ -30,7 +30,6 @@
</bean> </bean>
<bean id="search.solrQueryHTTPCLient" class="org.alfresco.repo.search.impl.solr.SolrQueryHTTPClient" init-method="init"> <bean id="search.solrQueryHTTPCLient" class="org.alfresco.repo.search.impl.solr.SolrQueryHTTPClient" init-method="init">
<property name="httpClientFactory" ref="solrHttpClientFactory"/>
<property name="nodeService"> <property name="nodeService">
<ref bean="mlAwareNodeService" /> <ref bean="mlAwareNodeService" />
</property> </property>
@@ -55,10 +54,50 @@
</map> </map>
</property> </property>
<property name="storeMappings"> <property name="storeMappings">
<map> <ref bean="solr.store.mappings" />
<entry key="workspace://SpacesStore" value="alfresco" /> </property>
<entry key="archive://SpacesStore" value="archive" /> </bean>
</map>
<!--The configurable RPC user mappings - actually a post-processed composite property! -->
<bean id="solr.store.mappings" class="org.springframework.beans.factory.config.ListFactoryBean">
<property name="sourceList">
<list>
<bean class="org.alfresco.repo.search.impl.solr.SolrStoreMapping">
<property name="beanName">
<value>solrMappingAlfresco</value>
</property>
<property name="httpClientFactory">
<value>solrHttpClientFactory</value>
</property>
<property name="baseUrl">
<value>/solr/alfresco</value>
</property>
<property name="protocol">
<value>workspace</value>
</property>
<property name="identifier">
<value>SpacesStore</value>
</property>
</bean>
<bean class="org.alfresco.repo.search.impl.solr.SolrStoreMapping">
<property name="beanName">
<value>solrMappingArchive</value>
</property>
<property name="httpClientFactory">
<value>solrHttpClientFactory</value>
</property>
<property name="baseUrl">
<value>/solr/archive</value>
</property>
<property name="protocol">
<value>archive</value>
</property>
<property name="identifier">
<value>SpacesStore</value>
</property>
</bean>
</list>
</property> </property>
</bean> </bean>

View File

@@ -29,6 +29,8 @@ import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.service.cmr.repository.ContentIOException; import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader; import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter; import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.GUID;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
@@ -45,7 +47,8 @@ public abstract class AbstractRoutingContentStore implements ContentStore
{ {
private static Log logger = LogFactory.getLog(AbstractRoutingContentStore.class); private static Log logger = LogFactory.getLog(AbstractRoutingContentStore.class);
private SimpleCache<String, ContentStore> storesByContentUrl; private String instanceKey = GUID.generate();
private SimpleCache<Pair<String, String>, ContentStore> storesByContentUrl;
private ReadLock storesCacheReadLock; private ReadLock storesCacheReadLock;
private WriteLock storesCacheWriteLock; private WriteLock storesCacheWriteLock;
@@ -59,7 +62,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
/** /**
* @param storesCache cache of stores used to access URLs * @param storesCache cache of stores used to access URLs
*/ */
public void setStoresCache(SimpleCache<String, ContentStore> storesCache) public void setStoresCache(SimpleCache<Pair<String, String>, ContentStore> storesCache)
{ {
this.storesByContentUrl = storesCache; this.storesByContentUrl = storesCache;
} }
@@ -89,11 +92,12 @@ public abstract class AbstractRoutingContentStore implements ContentStore
*/ */
private ContentStore selectReadStore(String contentUrl) private ContentStore selectReadStore(String contentUrl)
{ {
Pair<String, String> cacheKey = new Pair<String, String>(instanceKey, contentUrl);
storesCacheReadLock.lock(); storesCacheReadLock.lock();
try try
{ {
// Check if the store is in the cache // Check if the store is in the cache
ContentStore store = storesByContentUrl.get(contentUrl); ContentStore store = storesByContentUrl.get(cacheKey);
if (store != null) if (store != null)
{ {
// We found a store that was previously used // We found a store that was previously used
@@ -127,7 +131,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
try try
{ {
// Double check // Double check
ContentStore store = storesByContentUrl.get(contentUrl); ContentStore store = storesByContentUrl.get(cacheKey);
if (store != null && store.exists(contentUrl)) if (store != null && store.exists(contentUrl))
{ {
// We found a store and can use it // We found a store and can use it
@@ -169,7 +173,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
// We found one // We found one
store = storeInList; store = storeInList;
// Put the value in the cache // Put the value in the cache
storesByContentUrl.put(contentUrl, store); storesByContentUrl.put(cacheKey, store);
break; break;
} }
// Check if the content URL was supported // Check if the content URL was supported
@@ -326,6 +330,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
public ContentWriter getWriter(ContentContext context) throws ContentIOException public ContentWriter getWriter(ContentContext context) throws ContentIOException
{ {
String contentUrl = context.getContentUrl(); String contentUrl = context.getContentUrl();
Pair<String, String> cacheKey = new Pair<String, String>(instanceKey, contentUrl);
if (contentUrl != null) if (contentUrl != null)
{ {
// Check to see if it is in the cache // Check to see if it is in the cache
@@ -333,7 +338,7 @@ public abstract class AbstractRoutingContentStore implements ContentStore
try try
{ {
// Check if the store is in the cache // Check if the store is in the cache
ContentStore store = storesByContentUrl.get(contentUrl); ContentStore store = storesByContentUrl.get(cacheKey);
if (store != null) if (store != null)
{ {
throw new ContentExistsException(this, contentUrl); throw new ContentExistsException(this, contentUrl);
@@ -370,11 +375,12 @@ public abstract class AbstractRoutingContentStore implements ContentStore
} }
ContentWriter writer = store.getWriter(context); ContentWriter writer = store.getWriter(context);
String newContentUrl = writer.getContentUrl(); String newContentUrl = writer.getContentUrl();
Pair<String, String> newCacheKey = new Pair<String, String>(instanceKey, newContentUrl);
// Cache the store against the URL // Cache the store against the URL
storesCacheWriteLock.lock(); storesCacheWriteLock.lock();
try try
{ {
storesByContentUrl.put(newContentUrl, store); storesByContentUrl.put(newCacheKey, store);
} }
finally finally
{ {

View File

@@ -32,6 +32,7 @@ import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentIOException; import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader; import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter; import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.Pair;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
/** /**
@@ -160,7 +161,7 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
Cache ehCache = new Cache("RandomRoutingContentStore", 50, false, true, 0L, 0L); Cache ehCache = new Cache("RandomRoutingContentStore", 50, false, true, 0L, 0L);
CacheManager cacheManager = new CacheManager(); CacheManager cacheManager = new CacheManager();
cacheManager.addCache(ehCache); cacheManager.addCache(ehCache);
EhCacheAdapter<String, ContentStore> cache = new EhCacheAdapter<String, ContentStore>(); EhCacheAdapter<Pair<String,String>, ContentStore> cache = new EhCacheAdapter<Pair<String,String>, ContentStore>();
cache.setCache(ehCache); cache.setCache(ehCache);
super.setStoresCache(cache); super.setStoresCache(cache);
} }

View File

@@ -2636,6 +2636,16 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
catch (Throwable e) catch (Throwable e)
{ {
controlDAO.rollbackToSavepoint(savepoint); controlDAO.rollbackToSavepoint(savepoint);
if (isDebugEnabled)
{
logger.debug(
"Failed to insert node association: \n" +
" sourceNodeId: " + sourceNodeId + "\n" +
" targetNodeId: " + targetNodeId + "\n" +
" assocTypeQName: " + assocTypeQName + "\n" +
" assocIndex: " + assocIndex,
e);
}
throw new AssociationExistsException(sourceNodeId, targetNodeId, assocTypeQName); throw new AssociationExistsException(sourceNodeId, targetNodeId, assocTypeQName);
} }
} }

View File

@@ -23,6 +23,8 @@ import java.io.IOException;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.Reader; import java.io.Reader;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map; import java.util.Map;
@@ -35,6 +37,7 @@ import org.alfresco.repo.search.impl.lucene.LuceneQueryParserException;
import org.alfresco.repo.search.impl.lucene.SolrJSONResultSet; import org.alfresco.repo.search.impl.lucene.SolrJSONResultSet;
import org.alfresco.repo.tenant.TenantService; import org.alfresco.repo.tenant.TenantService;
import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter; import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.cmr.search.LimitBy; import org.alfresco.service.cmr.search.LimitBy;
import org.alfresco.service.cmr.search.ResultSet; import org.alfresco.service.cmr.search.ResultSet;
@@ -62,12 +65,15 @@ import org.json.JSONArray;
import org.json.JSONException; import org.json.JSONException;
import org.json.JSONObject; import org.json.JSONObject;
import org.json.JSONTokener; import org.json.JSONTokener;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.extensions.surf.util.I18NUtil; import org.springframework.extensions.surf.util.I18NUtil;
/** /**
* @author Andy * @author Andy
*/ */
public class SolrQueryHTTPClient public class SolrQueryHTTPClient implements BeanFactoryAware
{ {
static Log s_logger = LogFactory.getLog(SolrQueryHTTPClient.class); static Log s_logger = LogFactory.getLog(SolrQueryHTTPClient.class);
@@ -79,16 +85,16 @@ public class SolrQueryHTTPClient
private Map<String, String> languageMappings; private Map<String, String> languageMappings;
private Map<String, String> storeMappings; private List<SolrStoreMapping> storeMappings;
private String baseUrl; private HashMap<StoreRef, HttpClient> httpClients = new HashMap<StoreRef, HttpClient>();
private HttpClient httpClient; private HashMap<StoreRef, SolrStoreMapping> mappingLookup = new HashMap<StoreRef, SolrStoreMapping>();
private HttpClientFactory httpClientFactory;
private RepositoryState repositoryState; private RepositoryState repositoryState;
private BeanFactory beanFactory;
public SolrQueryHTTPClient() public SolrQueryHTTPClient()
{ {
} }
@@ -96,21 +102,23 @@ public class SolrQueryHTTPClient
public void init() public void init()
{ {
PropertyCheck.mandatory(this, "NodeService", nodeService); PropertyCheck.mandatory(this, "NodeService", nodeService);
PropertyCheck.mandatory(this, "PermissionService", nodeService); PropertyCheck.mandatory(this, "PermissionService", permissionService);
PropertyCheck.mandatory(this, "TenantService", nodeService); PropertyCheck.mandatory(this, "TenantService", tenantService);
PropertyCheck.mandatory(this, "LanguageMappings", nodeService); PropertyCheck.mandatory(this, "LanguageMappings", languageMappings);
PropertyCheck.mandatory(this, "StoreMappings", nodeService); PropertyCheck.mandatory(this, "StoreMappings", storeMappings);
PropertyCheck.mandatory(this, "HttpClientFactory", nodeService); PropertyCheck.mandatory(this, "RepositoryState", repositoryState);
PropertyCheck.mandatory(this, "RepositoryState", nodeService);
StringBuilder sb = new StringBuilder(); for(SolrStoreMapping mapping : storeMappings)
sb.append("/solr"); {
this.baseUrl = sb.toString(); mappingLookup.put(mapping.getStoreRef(), mapping);
httpClient = httpClientFactory.getHttpClient(); HttpClientFactory httpClientFactory = (HttpClientFactory)beanFactory.getBean(mapping.getHttpClientFactory());
HttpClient httpClient = httpClientFactory.getHttpClient();
HttpClientParams params = httpClient.getParams(); HttpClientParams params = httpClient.getParams();
params.setBooleanParameter(HttpClientParams.PREEMPTIVE_AUTHENTICATION, true); params.setBooleanParameter(HttpClientParams.PREEMPTIVE_AUTHENTICATION, true);
httpClient.getState().setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT), new UsernamePasswordCredentials("admin", "admin")); httpClient.getState().setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT), new UsernamePasswordCredentials("admin", "admin"));
httpClients.put(mapping.getStoreRef(), httpClient);
}
} }
/** /**
@@ -121,11 +129,6 @@ public class SolrQueryHTTPClient
this.repositoryState = repositoryState; this.repositoryState = repositoryState;
} }
public void setHttpClientFactory(HttpClientFactory httpClientFactory)
{
this.httpClientFactory = httpClientFactory;
}
public void setNodeService(NodeService nodeService) public void setNodeService(NodeService nodeService)
{ {
this.nodeService = nodeService; this.nodeService = nodeService;
@@ -146,7 +149,7 @@ public class SolrQueryHTTPClient
this.languageMappings = languageMappings; this.languageMappings = languageMappings;
} }
public void setStoreMappings(Map<String, String> storeMappings) public void setStoreMappings(List storeMappings)
{ {
this.storeMappings = storeMappings; this.storeMappings = storeMappings;
} }
@@ -160,19 +163,24 @@ public class SolrQueryHTTPClient
try try
{ {
URLCodec encoder = new URLCodec();
StringBuilder url = new StringBuilder();
url.append(baseUrl);
if (searchParameters.getStores().size() == 0) if (searchParameters.getStores().size() == 0)
{ {
throw new AlfrescoRuntimeException("No store for query"); throw new AlfrescoRuntimeException("No store for query");
} }
String storeUrlFragment = storeMappings.get(searchParameters.getStores().get(0).toString());
if (storeUrlFragment == null) StoreRef store = searchParameters.getStores().get(0);
SolrStoreMapping mapping = mappingLookup.get(store);
if (mapping == null)
{ {
throw new AlfrescoRuntimeException("No solr query support for store " + searchParameters.getStores().get(0).toString()); throw new AlfrescoRuntimeException("No solr query support for store " + searchParameters.getStores().get(0).toString());
} }
url.append("/").append(storeUrlFragment);
URLCodec encoder = new URLCodec();
StringBuilder url = new StringBuilder();
url.append(mapping.getBaseUrl());
String languageUrlFragment = languageMappings.get(language); String languageUrlFragment = languageMappings.get(language);
if (languageUrlFragment == null) if (languageUrlFragment == null)
{ {
@@ -341,6 +349,13 @@ public class SolrQueryHTTPClient
try try
{ {
HttpClient httpClient = httpClients.get(store);
if(httpClient == null)
{
throw new AlfrescoRuntimeException("No http client for store " + searchParameters.getStores().get(0).toString());
}
httpClient.executeMethod(post); httpClient.executeMethod(post);
if(post.getStatusCode() == HttpStatus.SC_MOVED_PERMANENTLY || post.getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY) if(post.getStatusCode() == HttpStatus.SC_MOVED_PERMANENTLY || post.getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY)
@@ -395,4 +410,13 @@ public class SolrQueryHTTPClient
} }
} }
/**
 * Remembers the owning Spring bean factory so that the HttpClientFactory bean
 * named by each store mapping can be looked up by name at initialisation time.
 *
 * @see org.springframework.beans.factory.BeanFactoryAware#setBeanFactory(org.springframework.beans.factory.BeanFactory)
 */
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException
{
this.beanFactory = beanFactory;
}
} }

View File

@@ -0,0 +1,150 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.search.impl.solr;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.util.PropertyCheck;
import org.springframework.beans.factory.BeanNameAware;
import org.springframework.beans.factory.InitializingBean;
/**
* @author Andy
*
*/
public class SolrStoreMapping implements BeanNameAware
{
    // The store this mapping applies to; derived from protocol + identifier by setStoreRef().
    StoreRef storeRef;

    // Name of the HttpClientFactory bean used to reach the SOLR instance for this store.
    String httpClientFactory;

    // Base URL of the SOLR core serving this store.
    String baseUrl;

    // Protocol half of the store reference (e.g. "workspace").
    String protocol;

    // Identifier half of the store reference (e.g. "SpacesStore").
    String identifier;

    // Spring bean name; recorded via BeanNameAware but not otherwise used here.
    private String beanName;

    /**
     * Default constructor for Spring configuration. The store reference is
     * derived once both {@link #setProtocol(String)} and
     * {@link #setIdentifier(String)} have been called.
     */
    public SolrStoreMapping()
    {
    }

    /**
     * Convenience constructor that fully populates the mapping.
     *
     * @param protocol the store protocol (e.g. "workspace")
     * @param identifier the store identifier (e.g. "SpacesStore")
     * @param httpClientFactory the name of the HttpClientFactory bean to use
     * @param baseUrl the base URL of the SOLR core
     */
    public SolrStoreMapping(String protocol, String identifier, String httpClientFactory, String baseUrl)
    {
        this.protocol = protocol;
        this.identifier = identifier;
        this.httpClientFactory = httpClientFactory;
        this.baseUrl = baseUrl;
        // Derive the store reference immediately. Previously only the setters did
        // this, leaving getStoreRef() null for instances built via this constructor,
        // which would break callers that key lookups on the store reference.
        setStoreRef();
    }

    /**
     * @return the storeRef, or null if protocol/identifier are not both set
     */
    public StoreRef getStoreRef()
    {
        return storeRef;
    }

    /**
     * @return the protocol
     */
    public String getProtocol()
    {
        return protocol;
    }

    /**
     * @param protocol the protocol to set
     */
    public void setProtocol(String protocol)
    {
        this.protocol = protocol;
        setStoreRef();
    }

    /**
     * @return the identifier
     */
    public String getIdentifier()
    {
        return identifier;
    }

    /**
     * @param identifier the identifier to set
     */
    public void setIdentifier(String identifier)
    {
        this.identifier = identifier;
        setStoreRef();
    }

    /**
     * @return the name of the HttpClientFactory bean
     */
    public String getHttpClientFactory()
    {
        return httpClientFactory;
    }

    /**
     * @param httpClientFactory the name of the HttpClientFactory bean to set
     */
    public void setHttpClientFactory(String httpClientFactory)
    {
        this.httpClientFactory = httpClientFactory;
    }

    /**
     * @return the baseUrl
     */
    public String getBaseUrl()
    {
        return baseUrl;
    }

    /**
     * @param baseUrl the baseUrl to set
     */
    public void setBaseUrl(String baseUrl)
    {
        this.baseUrl = baseUrl;
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String)
     */
    @Override
    public void setBeanName(String beanName)
    {
        this.beanName = beanName;
    }

    /**
     * Rebuilds the cached store reference once both halves are available.
     * Safe to call at any time; does nothing until protocol and identifier
     * are both non-null.
     */
    private void setStoreRef()
    {
        if ((protocol != null) && (identifier != null))
        {
            this.storeRef = new StoreRef(protocol, identifier);
        }
    }
}

View File

@@ -2688,10 +2688,13 @@ public class JBPMEngine extends AlfrescoBpmEngine implements WorkflowEngine
{ {
Map<String, Object> tokenVars = varMap.getVariablesLocally(); Map<String, Object> tokenVars = varMap.getVariablesLocally();
for (Map.Entry<String, Object> entry : tokenVars.entrySet()) for (Map.Entry<String, Object> entry : tokenVars.entrySet())
{
if (!vars.containsKey(entry.getKey()))
{ {
vars.put(entry.getKey(), entry.getValue()); vars.put(entry.getKey(), entry.getValue());
} }
} }
}
token = token.getParent(); token = token.getParent();
} }
} }

View File

@@ -236,7 +236,8 @@ public class ExportDb
} }
// Oracle hack: ignore tables in the recycle bin // Oracle hack: ignore tables in the recycle bin
if (tableName.startsWith("BIN$")) // ALF-14129 fix, check whether schema already contains object with provided name
if (tableName.startsWith("BIN$") || schema.containsByName(tableName))
{ {
continue; continue;
} }

View File

@@ -192,7 +192,8 @@ public class ForeignKey extends AbstractDbObject
{ {
return false; return false;
} }
if (!getTargetTable().equals(otherFK.getTargetTable())) // ALF-14129 fix, make table names case insensitive
if (!getTargetTable().equalsIgnoreCase(otherFK.getTargetTable()))
{ {
return false; return false;
} }

View File

@@ -118,5 +118,10 @@ public class ForeignKeyTest extends DbObjectTestBase<ForeignKey>
thisFK = new ForeignKey(parent, "the_fk", "local_col", "target_table", "target_col"); thisFK = new ForeignKey(parent, "the_fk", "local_col", "target_table", "target_col");
thatFK = new ForeignKey(parent, "the_fk", "local_col", "target_table", "target_col2"); thatFK = new ForeignKey(parent, "the_fk", "local_col", "target_table", "target_col2");
assertFalse("FKs have different target column.", thisFK.sameAs(thatFK)); assertFalse("FKs have different target column.", thisFK.sameAs(thatFK));
// ALF-14129 fix test
thisFK = new ForeignKey(parent, "the_fk", "local_col", "target_table", "target_col");
thatFK = new ForeignKey(parent, "the_fk", "local_col", "TARGET_TABLE", "target_col");
assertTrue("FKs are case sensitive to targetTable's name.", thisFK.sameAs(thatFK));
} }
} }

View File

@@ -192,4 +192,21 @@ public class Schema extends AbstractDbObject implements Iterable<DbObject>
} }
return true; return true;
} }
/**
 * ALF-14129 fix: checks whether the schema already contains an object with the
 * provided name. The comparison is case insensitive (database dialects differ
 * in how they fold identifier case).
 *
 * @param name the object name to look for; must not be null
 * @return true if any object in this schema has the given name, ignoring case
 */
public boolean containsByName(String name)
{
    // Schema is Iterable<DbObject>, so an enhanced for loop replaces the
    // original explicit Iterator with identical traversal order.
    for (DbObject dbObject : this)
    {
        if (dbObject.getName().equalsIgnoreCase(name))
        {
            return true;
        }
    }
    return false;
}
} }