Mirror of https://github.com/Alfresco/alfresco-community-repo.git
ALF-9157 Finish wiki webscript unit tests, and switch the lucene based JS webscripts to de-lucene Java ones
git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@29613 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -203,14 +203,18 @@ public abstract class AbstractWikiWebScript extends DeclarativeWebScript
    protected Map<String, Object> renderWikiPage(WikiPageInfo page)
    {
        Map<String, Object> res = new HashMap<String, Object>();
        res.put("page", page);
        res.put("node", page.getNodeRef());
        res.put("page", new ScriptNode(page.getNodeRef(), serviceRegistry));
        res.put("name", page.getSystemName());
        res.put("title", page.getTitle());
        res.put("contents", page.getContents());
        res.put("tags", page.getTags());

        // Both forms used for dates
        res.put("createdOn", page.getCreatedAt());
        res.put("modifiedOn", page.getModifiedAt());
        res.put("tags", page.getTags());
        res.put("created", page.getCreatedAt());
        res.put("modified", page.getModifiedAt());

        // FTL needs a script node of the people
        res.put("createdBy", buildPerson(page.getCreator()));
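The key change in this hunk is that "page" no longer holds the raw WikiPageInfo bean: the raw NodeRef moves to "node", the template-friendly ScriptNode wrapper takes over "page", and the dates are published under both the old and new key names. A small illustrative check of the resulting model (the assertions are hypothetical, only renderWikiPage and the key names come from the hunk above):

    Map<String, Object> res = renderWikiPage(page);
    // Raw reference, useful for URL building in the templates
    assert res.get("node") instanceof NodeRef;
    // Template-friendly wrapper rather than the WikiPageInfo bean itself
    assert res.get("page") instanceof ScriptNode;
    // Both spellings of the date keys carry the same value
    assert res.get("createdOn").equals(res.get("created"));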
@@ -265,7 +269,7 @@ public abstract class AbstractWikiWebScript extends DeclarativeWebScript


        // Get the site short name. Try quite hard to do so...
        String siteName = templateVars.get("site");
        String siteName = templateVars.get("siteId");
        if(siteName == null)
        {
            siteName = req.getParameter("site");
@@ -278,6 +282,10 @@ public abstract class AbstractWikiWebScript extends DeclarativeWebScript
        {
            siteName = json.getString("siteid");
        }
        else if(json.has("siteId"))
        {
            siteName = json.getString("siteId");
        }
        else if(json.has("site"))
        {
            siteName = json.getString("site");
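Read together, the two hunks above make the site lookup tolerant of the different spellings the URL templates and JSON bodies use. A consolidation of the fallback order as a sketch; the helper name and signature are hypothetical, only the key names come from the diff:

    // Illustrative only: summarises the resolution order used above.
    private static String resolveSiteName(Map<String, String> templateVars,
            WebScriptRequest req, JSONObject json) throws JSONException
    {
        String siteName = templateVars.get("siteId");     // 1. URL template variable
        if (siteName == null)
        {
            siteName = req.getParameter("site");          // 2. request parameter
        }
        if (siteName == null && json != null)
        {
            // 3. JSON body, in its various spellings
            if (json.has("siteid"))       siteName = json.getString("siteid");
            else if (json.has("siteId"))  siteName = json.getString("siteId");
            else if (json.has("site"))    siteName = json.getString("site");
        }
        return siteName;
    }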
@@ -299,11 +307,12 @@ public abstract class AbstractWikiWebScript extends DeclarativeWebScript
            throw new WebScriptException(Status.STATUS_NOT_FOUND, error);
        }

        // Link name is optional
        String linkName = templateVars.get("path");
        // Page name is optional
        // Note - it's really the Name, even if it's called "Title"
        String pageName = templateVars.get("pageTitle");

        // Have the real work done
        return executeImpl(site, linkName, req, json, status, cache);
        return executeImpl(site, pageName, req, json, status, cache);
    }

    protected abstract Map<String, Object> executeImpl(SiteInfo site,
source/java/org/alfresco/repo/web/scripts/wiki/WikiPageGet.java (new file, 120 lines)
@@ -0,0 +1,120 @@
/*
 * Copyright (C) 2005-2011 Alfresco Software Limited.
 *
 * This file is part of Alfresco
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 */
package org.alfresco.repo.web.scripts.wiki;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.alfresco.query.PagingRequest;
import org.alfresco.query.PagingResults;
import org.alfresco.repo.wiki.WikiServiceImpl;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.json.JSONObject;
import org.springframework.extensions.webscripts.Cache;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.WebScriptRequest;

/**
 * This class is the controller for the wiki page listing page.get webscript.
 *
 * @author Nick Burch
 * @since 4.0
 */
public class WikiPageGet extends AbstractWikiWebScript
{
    // For matching links. Not the best pattern ever...
    private static final Pattern LINK_PATTERN = Pattern.compile("\\[\\[([^\\|\\]]+)");

    @Override
    protected Map<String, Object> executeImpl(SiteInfo site, String pageName,
            WebScriptRequest req, JSONObject json, Status status, Cache cache) {
        Map<String, Object> model = new HashMap<String, Object>();

        // Try to find the page
        WikiPageInfo page = wikiService.getWikiPage(site.getShortName(), pageName);
        if(page == null)
        {
            String message = "The Wiki Page could not be found";
            status.setCode(Status.STATUS_NOT_FOUND);
            status.setMessage(message);

            // Grab the container, used in permissions checking
            NodeRef container = siteService.getContainer(
                    site.getShortName(), WikiServiceImpl.WIKI_COMPONENT
            );
            model.put("container", container);
            model.put("error", message);

            // Bail out
            Map<String, Object> result = new HashMap<String, Object>();
            result.put("result", model);
            return result;
        }


        // Identify all the internal page links, valid and not
        // TODO This may be a candidate for the service in future
        List<String> links = new ArrayList<String>();
        if(page.getContents() != null)
        {
            Matcher m = LINK_PATTERN.matcher(page.getContents());
            while(m.find())
            {
                String link = m.group(1);
                if(! links.contains(link))
                {
                    links.add(link);
                }
            }
        }


        // Get the list of pages, needed for link matching apparently
        PagingRequest paging = new PagingRequest(MAX_QUERY_ENTRY_COUNT);
        PagingResults<WikiPageInfo> pages = wikiService.listWikiPages(site.getShortName(), paging);

        List<String> pageNames = new ArrayList<String>();
        for (WikiPageInfo p : pages.getPage())
        {
            pageNames.add(p.getSystemName());
        }


        // All done
        model.put("page", page);
        model.put("node", page.getNodeRef());
        model.put("container", page.getContainerNodeRef());
        model.put("links", links);
        model.put("pageList", pageNames);
        model.put("tags", page.getTags());
        model.put("siteId", site.getShortName());
        model.put("site", site);

        // Double wrap
        Map<String, Object> result = new HashMap<String, Object>();
        result.put("result", model);
        return result;
    }
}
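The LINK_PATTERN used by WikiPageGet above captures only the target part of a wiki link, stopping at the "|" alias separator or the closing brackets. A self-contained illustration of that behaviour against the same sample markup the tests use (the demo class itself is not part of the commit):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class LinkPatternDemo
    {
        public static void main(String[] args)
        {
            // Same pattern as WikiPageGet.LINK_PATTERN
            Pattern linkPattern = Pattern.compile("\\[\\[([^\\|\\]]+)");
            String contents = "Text text [[TestPageOne|P1]] [[Test_Page_Two|P2]] [[Invalid|Invalid]] text";

            Matcher m = linkPattern.matcher(contents);
            while (m.find())
            {
                // Prints: TestPageOne, Test_Page_Two, Invalid
                System.out.println(m.group(1));
            }
        }
    }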
@@ -26,7 +26,6 @@ import java.util.Map;

import org.alfresco.query.PagingRequest;
import org.alfresco.query.PagingResults;
import org.alfresco.repo.jscript.ScriptNode;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.wiki.WikiServiceImpl;
import org.alfresco.service.cmr.repository.NodeRef;
@@ -80,7 +79,7 @@ public class WikiPageListGet extends AbstractWikiWebScript

        int days = RECENT_SEARCH_PERIOD_DAYS;
        String daysS = req.getParameter("days");
        if(daysS != null || daysS.length() > 0)
        if(daysS != null && daysS.length() > 0)
        {
            days = Integer.parseInt(daysS);
        }
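The one-character change above is a real bug fix: with "||", a missing days parameter still reaches the length check on a null reference. A minimal sketch of the failure mode (variable names reused from the hunk, the default value 7 is only a stand-in for RECENT_SEARCH_PERIOD_DAYS):

    public class DaysParamGuardDemo
    {
        public static void main(String[] args)
        {
            String daysS = null; // request had no "days" parameter
            int days = 7;        // stand-in for RECENT_SEARCH_PERIOD_DAYS

            // Old guard: '||' only short-circuits when the left side is true, so a
            // null daysS still evaluates daysS.length() -> NullPointerException:
            // if (daysS != null || daysS.length() > 0) { ... }

            // Fixed guard: '&&' skips the length check when daysS is null.
            if (daysS != null && daysS.length() > 0)
            {
                days = Integer.parseInt(daysS);
            }
            System.out.println("days = " + days); // prints the default, no exception
        }
    }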
@@ -149,10 +148,13 @@ public class WikiPageListGet extends AbstractWikiWebScript
        }

        // All done
        Map<String, Object> wiki = new HashMap<String, Object>();
        wiki.put("pages", items); // Old style
        wiki.put("container", container);

        Map<String, Object> model = new HashMap<String, Object>();
        model.put("data", data); // New style
        model.put("pages", items); // Old style
        model.put("container", new ScriptNode(container, serviceRegistry));
        model.put("wiki", wiki);
        model.put("siteId", site.getShortName());
        model.put("site", site);
        return model;
@@ -35,16 +35,17 @@ import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.service.cmr.site.SiteInfo;
import org.alfresco.service.cmr.site.SiteService;
import org.alfresco.service.cmr.site.SiteVisibility;
import org.alfresco.service.cmr.wiki.WikiPageInfo;
import org.alfresco.service.cmr.wiki.WikiService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ISO8601DateFormat;
import org.alfresco.util.PropertyMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.extensions.webscripts.Status;
import org.springframework.extensions.webscripts.TestWebScriptServer.GetRequest;
import org.springframework.extensions.webscripts.TestWebScriptServer.DeleteRequest;
import org.springframework.extensions.webscripts.TestWebScriptServer.GetRequest;
import org.springframework.extensions.webscripts.TestWebScriptServer.PostRequest;
import org.springframework.extensions.webscripts.TestWebScriptServer.PutRequest;
import org.springframework.extensions.webscripts.TestWebScriptServer.Response;
@@ -67,6 +68,7 @@ public class WikiRestApiTest extends BaseWebScriptTest
    private NodeService nodeService;
    private NodeService internalNodeService;
    private SiteService siteService;
    private WikiService wikiService;

    private static final String USER_ONE = "UserOneSecondToo";
    private static final String USER_TWO = "UserTwoSecondToo";
@@ -80,6 +82,7 @@ public class WikiRestApiTest extends BaseWebScriptTest
    private static final String PAGE_CONTENTS_ONE = "http://google.com/";
    private static final String PAGE_CONTENTS_TWO = "http://alfresco.com/";
    private static final String PAGE_CONTENTS_THREE = "http://share.alfresco.com/";
    private static final String PAGE_CONTENTS_LINK = "Text text [[TestPageOne|P1]] [[Test_Page_Two|P2]] [[Invalid|Invalid]] text";

    private static final String URL_WIKI_BASE = "/slingshot/wiki/page";
    private static final String URL_WIKI_LIST = URL_WIKI_BASE + "s/" + SITE_SHORT_NAME_WIKI;
@@ -104,6 +107,7 @@ public class WikiRestApiTest extends BaseWebScriptTest
        this.personService = (PersonService)getServer().getApplicationContext().getBean("PersonService");
        this.nodeService = (NodeService)getServer().getApplicationContext().getBean("NodeService");
        this.siteService = (SiteService)getServer().getApplicationContext().getBean("SiteService");
        this.wikiService = (WikiService)getServer().getApplicationContext().getBean("WikiService");
        this.internalNodeService = (NodeService)getServer().getApplicationContext().getBean("nodeService");

        // Authenticate as user
@@ -233,7 +237,7 @@ public class WikiRestApiTest extends BaseWebScriptTest
    /**
     * Creates a single wiki page based on the supplied details
     */
    private JSONObject createOrUpdatePage(String title, String contents, int expectedStatus)
    private JSONObject createOrUpdatePage(String title, String contents, String version, int expectedStatus)
        throws Exception
    {
        String name = title.replace(' ', '_');
@@ -243,9 +247,25 @@ public class WikiRestApiTest extends BaseWebScriptTest
        json.put("title", title);
        json.put("pagecontent", contents);
        json.put("tags", "");
        json.put("forceSave", "true"); // Allow the save as-is
        json.put("page", "wiki-page"); // TODO Is this really needed?

        if(version == null || "force".equals(version))
        {
            // Allow the save as-is, no versioning check
            json.put("forceSave", "true"); // Allow the save as-is
        }
        else
        {
            if("none".equals(version))
            {
                // No versioning
            }
            else
            {
                json.put("currentVersion", version);
            }
        }

        Response response = sendRequest(new PutRequest(URL_WIKI_UPDATE + name, json.toString(), "application/json"), expectedStatus);
        if (expectedStatus == Status.STATUS_OK)
        {
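The new version argument gives the test helper three modes: null or "force" sends forceSave=true, "none" sends no version information at all (which the webscript is expected to reject for an existing page), and any other value is passed through as currentVersion. Typical calls, taken from the tests further down:

    // Create or force-overwrite, no optimistic locking involved
    createOrUpdatePage(PAGE_TITLE_TWO, PAGE_CONTENTS_ONE, null, Status.STATUS_OK);
    createOrUpdatePage(PAGE_TITLE_TWO, "Changed Contents 3", "force", Status.STATUS_OK);

    // No version info on an existing page -> expected to be rejected with 409
    createOrUpdatePage(PAGE_TITLE_TWO, "Changed Contents", "none", Status.STATUS_CONFLICT);

    // Update against the version we last saw -> accepted if nobody else changed it
    createOrUpdatePage(PAGE_TITLE_TWO, "Changed Contents 2", "1.0", Status.STATUS_OK);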
@@ -354,7 +374,7 @@ public class WikiRestApiTest extends BaseWebScriptTest


        // Create
        page = createOrUpdatePage(PAGE_TITLE_ONE, PAGE_CONTENTS_ONE, Status.STATUS_OK);
        page = createOrUpdatePage(PAGE_TITLE_ONE, PAGE_CONTENTS_ONE, null, Status.STATUS_OK);
        name = PAGE_TITLE_ONE.replace(' ', '_');
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("title"));

@@ -383,7 +403,7 @@ public class WikiRestApiTest extends BaseWebScriptTest

        // Edit
        // We should get a simple message
        page = createOrUpdatePage(PAGE_TITLE_ONE, "M"+PAGE_CONTENTS_ONE, Status.STATUS_OK);
        page = createOrUpdatePage(PAGE_TITLE_ONE, "M"+PAGE_CONTENTS_ONE, null, Status.STATUS_OK);
        assertEquals(name, page.getString("name"));
        assertEquals(PAGE_TITLE_ONE, page.getString("title"));
        assertEquals("M"+PAGE_CONTENTS_ONE, page.getString("pagetext"));
@@ -443,7 +463,7 @@ public class WikiRestApiTest extends BaseWebScriptTest


        // Create a page
        page = createOrUpdatePage(PAGE_TITLE_TWO, PAGE_CONTENTS_ONE, Status.STATUS_OK);
        page = createOrUpdatePage(PAGE_TITLE_TWO, PAGE_CONTENTS_ONE, null, Status.STATUS_OK);
        name = PAGE_TITLE_TWO.replace(' ', '_');
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("title"));

@@ -475,12 +495,114 @@ public class WikiRestApiTest extends BaseWebScriptTest

    public void testVersioning() throws Exception
    {
        // TODO
        WikiPageInfo wikiInfo;
        JSONObject page;
        JSONArray versions;
        String name;

        // Create a page
        page = createOrUpdatePage(PAGE_TITLE_TWO, PAGE_CONTENTS_ONE, null, Status.STATUS_OK);
        name = PAGE_TITLE_TWO.replace(' ', '_');
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("title"));


        // Check it was versioned by default
        wikiInfo = wikiService.getWikiPage(SITE_SHORT_NAME_WIKI, name);
        assertNotNull(wikiInfo);
        assertEquals(true, nodeService.hasAspect(wikiInfo.getNodeRef(), ContentModel.ASPECT_VERSIONABLE));

        // Check the JSON for versioning
        page = getPage(name, Status.STATUS_OK);
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("versionhistory"));

        versions = page.getJSONArray("versionhistory");
        assertEquals(1, versions.length());
        assertEquals("1.0", versions.getJSONObject(0).get("version"));
        assertEquals(USER_ONE, versions.getJSONObject(0).get("author"));


        // Upload a new copy without a version flag, denied
        createOrUpdatePage(PAGE_TITLE_TWO, "Changed Contents", "none", Status.STATUS_CONFLICT);


        // Upload a new copy with the appropriate version, allowed
        page = createOrUpdatePage(PAGE_TITLE_TWO, "Changed Contents 2", "1.0", Status.STATUS_OK);

        page = getPage(name, Status.STATUS_OK);
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("versionhistory"));

        versions = page.getJSONArray("versionhistory");
        assertEquals(2, versions.length());
        assertEquals("1.1", versions.getJSONObject(0).get("version"));
        assertEquals(USER_ONE, versions.getJSONObject(0).get("author"));
        assertEquals("1.0", versions.getJSONObject(1).get("version"));
        assertEquals(USER_ONE, versions.getJSONObject(1).get("author"));


        // Upload a new copy with the force flag, allowed
        page = createOrUpdatePage(PAGE_TITLE_TWO, "Changed Contents 3", "force", Status.STATUS_OK);

        page = getPage(name, Status.STATUS_OK);
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("versionhistory"));

        versions = page.getJSONArray("versionhistory");
        assertEquals(3, versions.length());
        assertEquals("1.2", versions.getJSONObject(0).get("version"));
        assertEquals(USER_ONE, versions.getJSONObject(0).get("author"));
        assertEquals("1.1", versions.getJSONObject(1).get("version"));
        assertEquals(USER_ONE, versions.getJSONObject(1).get("author"));
        assertEquals("1.0", versions.getJSONObject(2).get("version"));
        assertEquals(USER_ONE, versions.getJSONObject(2).get("author"));
    }

    public void testLinks() throws Exception
    {
        // TODO
        JSONObject page;
        JSONArray links;
        String name;
        String name2;

        // Create a page with no links
        page = createOrUpdatePage(PAGE_TITLE_TWO, PAGE_CONTENTS_TWO, null, Status.STATUS_OK);
        name = PAGE_TITLE_TWO.replace(' ', '_');
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("title"));


        // Check, won't have any links shown
        page = getPage(name, Status.STATUS_OK);
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("links"));
        links = page.getJSONArray("links");
        assertEquals(0, links.length());


        // Create a page with links
        // Should have links to pages 1 and 2
        page = createOrUpdatePage(PAGE_TITLE_THREE, PAGE_CONTENTS_LINK, null, Status.STATUS_OK);
        name2 = PAGE_TITLE_THREE.replace(' ', '_');
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("title"));

        // Check
        page = getPage(name2, Status.STATUS_OK);
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("links"));

        links = page.getJSONArray("links");
        assertEquals(3, links.length());
        assertEquals(PAGE_TITLE_ONE, links.getString(0));
        assertEquals(name, links.getString(1));
        assertEquals("Invalid", links.getString(2));


        // Create the 1st page, now change
        page = createOrUpdatePage(PAGE_TITLE_ONE, PAGE_CONTENTS_ONE, null, Status.STATUS_OK);

        page = getPage(name2, Status.STATUS_OK);
        assertEquals("Incorrect JSON: " + page.toString(), true, page.has("links"));

        links = page.getJSONArray("links");
        assertEquals(3, links.length());
        assertEquals(PAGE_TITLE_ONE, links.getString(0));
        assertEquals(name, links.getString(1));
        assertEquals("Invalid", links.getString(2));
    }

    /**
@@ -498,8 +620,8 @@ public class WikiRestApiTest extends BaseWebScriptTest


        // Add two links to get started with
        createOrUpdatePage(PAGE_TITLE_ONE, PAGE_CONTENTS_ONE, Status.STATUS_OK);
        createOrUpdatePage(PAGE_TITLE_TWO, PAGE_CONTENTS_TWO, Status.STATUS_OK);
        createOrUpdatePage(PAGE_TITLE_ONE, PAGE_CONTENTS_ONE, null, Status.STATUS_OK);
        createOrUpdatePage(PAGE_TITLE_TWO, PAGE_CONTENTS_TWO, null, Status.STATUS_OK);

        // Check again
        pages = getPages(null, null);
@@ -517,9 +639,9 @@ public class WikiRestApiTest extends BaseWebScriptTest

        // Add a third, which is internal, and created by the other user
        this.authenticationComponent.setCurrentUser(USER_TWO);
        JSONObject page3 = createOrUpdatePage(PAGE_TITLE_THREE, PAGE_CONTENTS_THREE, Status.STATUS_OK);
        JSONObject page3 = createOrUpdatePage(PAGE_TITLE_THREE, PAGE_CONTENTS_THREE, null, Status.STATUS_OK);
        String name3 = PAGE_TITLE_THREE.replace(' ', '_');
        createOrUpdatePage(PAGE_TITLE_THREE, "UD"+PAGE_CONTENTS_THREE, Status.STATUS_OK);
        createOrUpdatePage(PAGE_TITLE_THREE, "UD"+PAGE_CONTENTS_THREE, null, Status.STATUS_OK);
        this.authenticationComponent.setCurrentUser(USER_ONE);


@@ -551,31 +673,40 @@ public class WikiRestApiTest extends BaseWebScriptTest
        assertEquals(PAGE_TITLE_THREE, entries.getJSONObject(0).getString("title"));


        // Ask for filtering by recent docs
        // TODO
//      pages = getPages("recentlyAdded", null);
//      pages = getPages("recentlyModified", null);
//      assertEquals(3, pages.getInt("totalPages"));
//
//      entries = pages.getJSONArray("pages");
//      assertEquals(3, entries.length());
//      assertEquals(PAGE_TITLE_THREE, entries.getJSONObject(0).getString("title"));
//      assertEquals(PAGE_TITLE_TWO, entries.getJSONObject(1).getString("title"));
//      assertEquals(PAGE_TITLE_ONE, entries.getJSONObject(2).getString("title"));
//
//
//      // Push the 3rd event back, it'll fall off
//      pushPageCreatedDateBack(name3, 10);
//
//      pages = getPages("recent", null);
//      assertEquals(2, pages.getInt("total"));
//      assertEquals(2, pages.getInt("itemCount"));
//
//      entries = pages.getJSONArray("items");
//      assertEquals(2, entries.length());
//      assertEquals(PAGE_TITLE_TWO, entries.getJSONObject(0).getString("title"));
//      assertEquals(PAGE_TITLE_ONE, entries.getJSONObject(1).getString("title"));

        // Ask for filtering by recently added docs
        pages = getPages("recentlyAdded", null);
        assertEquals(3, pages.getInt("totalPages"));

        entries = pages.getJSONArray("pages");
        assertEquals(3, entries.length());
        assertEquals(PAGE_TITLE_THREE, entries.getJSONObject(0).getString("title"));
        assertEquals(PAGE_TITLE_TWO, entries.getJSONObject(1).getString("title"));
        assertEquals(PAGE_TITLE_ONE, entries.getJSONObject(2).getString("title"));

        // Push one back into the past
        pushPageCreatedDateBack(name3, 10);

        pages = getPages("recentlyAdded", null);
        assertEquals(2, pages.getInt("totalPages"));

        entries = pages.getJSONArray("pages");
        assertEquals(2, entries.length());
        assertEquals(PAGE_TITLE_TWO, entries.getJSONObject(0).getString("title"));
        assertEquals(PAGE_TITLE_ONE, entries.getJSONObject(1).getString("title"));


        // Now for recently modified ones
        pages = getPages("recentlyModified", null);
        assertEquals(3, pages.getInt("totalPages"));

        entries = pages.getJSONArray("pages");
        assertEquals(3, entries.length());
        assertEquals(PAGE_TITLE_THREE, entries.getJSONObject(0).getString("title"));
        assertEquals(PAGE_TITLE_TWO, entries.getJSONObject(1).getString("title"));
        assertEquals(PAGE_TITLE_ONE, entries.getJSONObject(2).getString("title"));
//      assertEquals(PAGE_TITLE_THREE, entries.getJSONObject(2).getString("title"));



        // Now hide the site, and remove the user from it, won't be allowed to see it