// Copyright (C) 2003,2004,2005 by Object Mentor, Inc. All rights reserved.
// Released under the terms of the GNU General Public License version 2 or later.
package fitnesse.wiki;

import junit.framework.TestCase;
import fitnesse.components.FitNesseTraversalListener;
import java.util.*;

public class PageCrawlerTest extends TestCase implements FitNesseTraversalListener {
  private WikiPage root;
  private WikiPage page1;
  private WikiPage page2;
  private WikiPage child1;
  private WikiPage child2;
  private WikiPage grandChild1;
  private PageCrawler crawler;
  private WikiPagePath page1Path;
  private WikiPagePath child1FullPath;
  private WikiPagePath page2Path;
  private WikiPagePath grandChild1FullPath;

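  // setUp builds this wiki hierarchy:
  //
  //   RooT
  //   |-- PageOne
  //   |   |-- ChildOne
  //   |   |   `-- GrandChildOne
  //   |   `-- ChildTwo
  //   `-- PageTwo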
  public void setUp() throws Exception {
    root = InMemoryPage.makeRoot("RooT");
    crawler = new PageCrawlerImpl();

    page1Path = PathParser.parse("PageOne");
    page2Path = PathParser.parse("PageTwo");
    child1FullPath = PathParser.parse("PageOne.ChildOne");
    grandChild1FullPath = PathParser.parse("PageOne.ChildOne.GrandChildOne");
    page1 = crawler.addPage(root, page1Path);
    page2 = crawler.addPage(root, page2Path);
    child1 = crawler.addPage(page1, PathParser.parse("ChildOne"));
    child2 = crawler.addPage(page1, PathParser.parse("ChildTwo"));
    grandChild1 = crawler.addPage(child1, PathParser.parse("GrandChildOne"));
  }

  public void testPageExists() throws Exception {
    assertTrue(crawler.pageExists(page1, PathParser.parse("ChildOne")));
    assertFalse(crawler.pageExists(page1, PathParser.parse("BlahBlah")));
  }

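  // A path beginning with a dot (".PageOne") is absolute: it is resolved
  // from the root page rather than from the page handed to the crawler.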
  public void testPageExistsUsingPath() throws Exception {
    assertTrue(crawler.pageExists(page1, PathParser.parse("ChildOne")));
    assertTrue(crawler.pageExists(root, child1FullPath));
    assertTrue(crawler.pageExists(root, grandChild1FullPath));
    assertTrue(crawler.pageExists(root, PathParser.parse(".PageOne")));
    assertTrue(crawler.pageExists(root, PathParser.parse(".PageOne.ChildOne.GrandChildOne")));

    assertFalse(crawler.pageExists(page1, PathParser.parse("BlahBlah")));
    assertFalse(crawler.pageExists(page1, PathParser.parse("PageOne.BlahBlah")));
  }

  public void testGetPage() throws Exception {
    assertNull(crawler.getPage(page1, page1Path));
    assertEquals(page1, crawler.getPage(root, page1Path));
    assertEquals(page2, crawler.getPage(root, page2Path));
    assertEquals(page1, crawler.getPage(page1, PathParser.parse(".PageOne")));
    assertEquals(page1, crawler.getPage(grandChild1, PathParser.parse(".PageOne")));
    assertEquals(grandChild1, crawler.getPage(page1, PathParser.parse("ChildOne.GrandChildOne")));
    assertEquals(root, crawler.getPage(root, PathParser.parse("root")));
    assertEquals(root, crawler.getPage(root, PathParser.parse(".")));
    assertEquals(root, crawler.getPage(root, PathParser.parse("")));
  }

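  // As the assertions below show, ">Name" names a child of the current
  // page, and "<Ancestor.Name" searches back up the hierarchy for Ancestor
  // before descending to Name.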
  public void testGetSiblingPage() throws Exception {
    assertEquals(page2, crawler.getSiblingPage(page1, page2Path));
    assertEquals(child1, crawler.getSiblingPage(page1, PathParser.parse(">ChildOne")));
    assertEquals(child2, crawler.getSiblingPage(grandChild1, PathParser.parse("<PageOne.ChildTwo")));
  }

  public void testGetFullPath() throws Exception {
    assertEquals(page1Path, crawler.getFullPath(page1));
    assertEquals(page2Path, crawler.getFullPath(page2));
    assertEquals(child1FullPath, crawler.getFullPath(child1));
    assertEquals(grandChild1FullPath, crawler.getFullPath(grandChild1));
    assertEquals(PathParser.parse(""), crawler.getFullPath(root));
  }

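  // getFullPathOfChild resolves a relative or absolute path against a
  // parent page and returns the full path from the root.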
  public void testGetAbsolutePathForChild() throws Exception {
    WikiPagePath somePagePath = PathParser.parse("SomePage");
    WikiPagePath somePageFullPath = crawler.getFullPathOfChild(root, somePagePath);
    assertEquals("SomePage", PathParser.render(somePageFullPath));

    WikiPagePath pageOneFullPath = crawler.getFullPathOfChild(root, page1Path);
    assertEquals("PageOne", PathParser.render(pageOneFullPath));

    WikiPagePath somePageChildFullPath = crawler.getFullPathOfChild(child1, somePagePath);
    assertEquals("PageOne.ChildOne.SomePage", PathParser.render(somePageChildFullPath));

    WikiPagePath otherPagePath = PathParser.parse("SomePage.OtherPage");
    WikiPagePath otherPageFullPath = crawler.getFullPathOfChild(root, otherPagePath);
    assertEquals("SomePage.OtherPage", PathParser.render(otherPageFullPath));

    WikiPagePath somePageAbsolutePath = PathParser.parse(".SomePage");
    WikiPagePath somePageAbsoluteFullPath = crawler.getFullPathOfChild(child1, somePageAbsolutePath);
    assertEquals("SomePage", PathParser.render(somePageAbsoluteFullPath));
  }

  public void testAddPage() throws Exception {
    WikiPage page = crawler.addPage(page1, PathParser.parse("SomePage"));
    assertEquals(PathParser.parse("PageOne.SomePage"), crawler.getFullPath(page));
    assertEquals(page1, page.getParent());
  }

  public void testRecursiveAddByName() throws Exception {
    crawler.addPage(root, PathParser.parse("AaAa"), "its content");
    assertTrue(root.hasChildPage("AaAa"));

    crawler.addPage(root, PathParser.parse("AaAa.BbBb"), "floop");
    assertTrue(crawler.pageExists(root, PathParser.parse("AaAa.BbBb")));
    assertEquals("floop", crawler.getPage(root, PathParser.parse("AaAa.BbBb")).getData().getContent());
  }

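  // Adding a page whose parent does not yet exist should create the
  // missing intermediate pages along the way.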
  public void testAddChildPageWithMissingParent() throws Exception {
    WikiPage page = crawler.addPage(root, PathParser.parse("WikiMail.BadSubject0123"), "");
    assertNotNull(page);
    assertEquals("BadSubject0123", page.getName());
    assertEquals(PathParser.parse("WikiMail.BadSubject0123"), crawler.getFullPath(page));
  }

  public void testGetRelativePageName() throws Exception {
    assertEquals("PageOne", crawler.getRelativeName(root, page1));
    assertEquals("PageOne.ChildOne", crawler.getRelativeName(root, child1));
    assertEquals("ChildOne", crawler.getRelativeName(page1, child1));
    assertEquals("GrandChildOne", crawler.getRelativeName(child1, grandChild1));
    assertEquals("ChildOne.GrandChildOne", crawler.getRelativeName(page1, grandChild1));
  }

  public void testIsRoot() throws Exception {
    assertTrue(crawler.isRoot(root));
    WikiPage page = crawler.addPage(root, page1Path);
    assertFalse(crawler.isRoot(page));
  }

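  // This test class doubles as the FitNesseTraversalListener passed to
  // traverse(); processPage() records the name of each page visited.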
  private final Set<String> traversedPages = new HashSet<String>();

  public void testTraversal() throws Exception {
    crawler.traverse(root, this);
    assertEquals(6, traversedPages.size());
    assertTrue(traversedPages.contains("PageOne"));
    assertTrue(traversedPages.contains("ChildOne"));
  }

  public void processPage(WikiPage page) throws Exception {
    traversedPages.add(page.getName());
  }

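  // Also required by FitNesseTraversalListener; the search pattern is
  // irrelevant to these tests, so a dummy value is returned.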
  public String getSearchPattern() throws Exception {
    return "blah";
  }

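  // Traversal should not follow symbolic links: the SymLink child added to
  // PageOne points at PageTwo, which traverse() already reaches directly,
  // so the count of visited pages stays at six.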
  public void testDoesntTraverseSymbolicPages() throws Exception {
    PageData data = page1.getData();
    data.getProperties().set(SymbolicPage.PROPERTY_NAME).set("SymLink", "PageTwo");
    page1.commit(data);

    crawler.traverse(root, this);
    assertEquals(6, traversedPages.size());

    assertFalse(traversedPages.contains("SymLink"));
  }
}