// Copyright (C) 2003,2004,2005 by Object Mentor, Inc. All rights reserved.
// Released under the terms of the GNU General Public License version 2 or later.

package fitnesse.wiki;

import junit.framework.TestCase;
import fitnesse.testutil.SimpleCachinePage;

public class CachingPageTest extends TestCase {
  private CachingPage root;
  private PageCrawler crawler;
  private WikiPagePath pageOnePath;
  private WikiPagePath childOnePath;
  private WikiPagePath rootPath;

  public void setUp() throws Exception {
    root = new SimpleCachinePage("RooT", null);
    crawler = root.getPageCrawler();
    pageOnePath = PathParser.parse(".PageOne");
    childOnePath = PathParser.parse(".PageOne.ChildOne");
    rootPath = PathParser.parse("root");
  }

  public void testCreate() throws Exception {
    String alpha = "AlphaAlpha";
    WikiPage root = InMemoryPage.makeRoot("root"); // local root shadows the fixture field
    assertFalse(root.hasChildPage(alpha));

    crawler.addPage(root, PathParser.parse(alpha), "content");
    assertTrue(root.hasChildPage(alpha));
  }

  public void testTwoLevel() throws Exception {
    String alpha = "AlphaAlpha";
    String beta = "BetaBeta";
    WikiPage subPage1 = crawler.addPage(root, PathParser.parse(alpha));
    crawler.addPage(subPage1, PathParser.parse(beta));
    assertTrue(crawler.pageExists(root, PathParser.parse(alpha + "." + beta)));
  }

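  // A doubled dot does not collapse into a single separator: even though
  // AlphaAlpha.BetaBeta exists, "AlphaAlpha..BetaBeta" names no page.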
  public void testDoubleDot() throws Exception {
    String alpha = "AlphaAlpha";
    String beta = "BetaBeta";
    WikiPage subPage1 = crawler.addPage(root, PathParser.parse(alpha));
    crawler.addPage(subPage1, PathParser.parse(beta));
    assertFalse(crawler.pageExists(root, PathParser.parse(alpha + ".." + beta)));
  }

  public void testClearPage() throws Exception {
    String child = "ChildPage";
    crawler.addPage(root, PathParser.parse(child), "content");
    assertTrue(root.hasCachedSubpage(child));
    root.removeChildPage(child);
    assertFalse(root.hasCachedSubpage(child));
  }

  // TODO MdM - trying to get rid of getParent()
  // public void testGetParent() throws Exception
  // {
  //   WikiPage child1 = crawler.addPage(root, "ChildOne", "ChildOne");
  //   WikiPage child2 = crawler.addPage(child1, "ChildTwo", "ChildTwo");
  //
  //   assertSame(child1, child2.getParent());
  //   assertSame(root, child1.getParent());
  //   assertEquals(root, root.getParent());
  // }

  public void testGetName() throws Exception {
    WikiPage frontPage = crawler.addPage(root, PathParser.parse("FrontPage"), "FrontPage");
    WikiPage c1 = crawler.addPage(frontPage, PathParser.parse("ChildOne"), "ChildOne");
    assertEquals("ChildOne", c1.getName());
    assertEquals(PathParser.parse("FrontPage.ChildOne"), crawler.getFullPath(c1));
  }

  public void testDefaultAttributes() throws Exception {
    WikiPage page = crawler.addPage(root, PathParser.parse("SomePage"));
    assertTrue(page.getData().hasAttribute(WikiPage.ACTION_EDIT));
  }

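  // CachingPage.cacheTime is the cache lifetime in milliseconds: reads within
  // that window return the same cached PageData instance, while getData()
  // after the window expires produces a fresh one.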
  public void testPageDataIsCached() throws Exception {
    CachingPage.cacheTime = 100;
    CachingPage page = (CachingPage) crawler.addPage(root, PathParser.parse("PageOne"), "some content");

    PageData data1 = page.getCachedData();
    PageData data2 = page.getCachedData();
    Thread.sleep(200);

    PageData data3 = page.getData();

    assertSame(data1, data2);
    assertNotSame(data1, data3);
  }

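  // dumpExpiredCachedData() discards cached data once cacheTime has elapsed,
  // so a subsequent getCachedData() returns null.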
  public void testDumpCachedExpiredData() throws Exception {
    CachingPage.cacheTime = 100;
    CachingPage page = (CachingPage) crawler.addPage(root, PathParser.parse("PageOne"), "some content");
    PageData data = page.getData();
    assertNotNull(data);
    Thread.sleep(200);
    page.dumpExpiredCachedData();
    assertNull(page.getCachedData());
  }

  public void testGetPageThatStartsWithDot() throws Exception {
    WikiPage page1 = crawler.addPage(root, PathParser.parse("PageOne"), "page one");
    WikiPage child1 = crawler.addPage(root, PathParser.parse("PageOne.ChildOne"), "child one");
    assertSame(page1, crawler.getPage(page1, pageOnePath));
    assertSame(child1, crawler.getPage(page1, childOnePath));
    assertSame(page1, crawler.getPage(child1, pageOnePath));
  }

  public void testGetPageUsingRootKeyWord() throws Exception {
    WikiPage page1 = crawler.addPage(root, PathParser.parse("PageOne"), "page one");
    assertSame(root, crawler.getPage(page1, rootPath));
    assertSame(root, crawler.getPage(root, rootPath));
  }

  public void testEquals() throws Exception {
    WikiPage root = InMemoryPage.makeRoot("RooT"); // local root shadows the fixture field
    WikiPage pageOne = crawler.addPage(root, PathParser.parse("PageOne"), "content");
    assertEquals(pageOne, pageOne);

    root.removeChildPage("PageOne");
    WikiPage pageOneOne = crawler.addPage(root, PathParser.parse("PageOne"));
    assertEquals(pageOne, pageOneOne);
  }

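  // The cache is expected to release page data under memory pressure
  // (e.g. via soft references) before the JVM throws OutOfMemoryError.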
  public void testCachedDataIsTrashedBeforeOutOfMemoryError() throws Exception {
    CachingPage page = (CachingPage) crawler.addPage(root, PathParser.parse("SomePage"), "some content");
    page.getData();
    assertNotNull(page.getCachedData());
    boolean exceptionThrown = false;
    try {
      new MemoryEater();
    } catch (OutOfMemoryError e) {
      assertNull(page.getCachedData());
      exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
  }

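  // Each MemoryEater allocates roughly 8 MB and immediately constructs another
  // instance as a field initializer, recursing until the heap is exhausted and
  // an OutOfMemoryError is thrown.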
  class MemoryEater {
    long[] array = new long[1000000];
    MemoryEater eater = new MemoryEater();
  }
}