/*
Very Quick Wiki - WikiWikiWeb clone
Copyright (C) 2001-2002 Gareth Cronin

This program is free software; you can redistribute it and/or modify
it under the terms of the latest version of the GNU Lesser General
Public License as published by the Free Software Foundation;

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with this program (gpl.txt); if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package vqwiki.servlets;

import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.log4j.Logger;

import vqwiki.Environment;
import vqwiki.PseudoTopicHandler;
import vqwiki.WikiBase;
import vqwiki.servlets.beans.SitemapBean;
import vqwiki.servlets.beans.SitemapLineBean;
import vqwiki.servlets.beans.StatisticsVWikiBean;
import vqwiki.utils.Utilities;

/**
 * This servlet generates a sitemap of the wiki: starting from the default
 * topic of each virtual wiki, it follows the internal links between topics
 * and renders them as a tree.
 *
 * This class was created on 09:34:30 19.07.2003
 *
 * @author $Author: wrh2 $
 */
public class SitemapServlet extends LongLastingOperationServlet {

    /** Logging */
    private static final Logger logger = Logger.getLogger(SitemapServlet.class);
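    // Single-character markers describing the tree connector drawn in front of
    // a sitemap line; the sitemap view presumably maps them to images.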
    public static final String LAST_IN_LIST = "e";
    public static final String MORE_TO_COME = "x";
    public static final String HORIZ_LINE = "a";
    public static final String NOTHING = "s";
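    // Result bean and progress counters: run() updates them while building the
    // sitemap and setProgress() derives a percentage from the counters.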
    private StatisticsVWikiBean vwikis;
    private int allWikiSize;
    private int allWikiCount;
    private int numPages;
    private int pageCount;

    /**
     * Default constructor.
     */
    public SitemapServlet() {
        super();
    }

    /**
     * Build the sitemap for every virtual wiki.
     * Starting from the configured default topic, the pages of each virtual
     * wiki are traversed recursively and the resulting sitemap lines are
     * collected for the result page. Progress is updated during the traversal.
     */
    public void run() {
        vwikis = new StatisticsVWikiBean();
        NumberFormat nf = NumberFormat.getInstance(locale);
        nf.setMaximumFractionDigits(2);
        nf.setMinimumFractionDigits(1);
        Collection allWikis;
        try {
            allWikis = WikiBase.getInstance().getVirtualWikiList();
        } catch (Exception e) {
            // use a modifiable empty list so the default wiki can still be added below
            allWikis = new ArrayList();
        }
        if (!allWikis.contains(WikiBase.DEFAULT_VWIKI)) {
            allWikis.add(WikiBase.DEFAULT_VWIKI);
        }
        Environment en = Environment.getInstance();
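        // parsePages() stops scanning a page for links once this localized
        // text is reached; presumably it marks the "is mentioned on"
        // back-reference section of a cooked page.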
        String endString = Utilities.resource("topic.ismentionedon", locale);
        allWikiCount = 0;
        allWikiSize = allWikis.size();
        numPages = 0;
        for (Iterator iterator = allWikis.iterator(); iterator.hasNext(); allWikiCount++) {
            pageCount = 0;
            setProgress();
            String currentWiki = (String) iterator.next();
            List sitemapLines = new ArrayList();
            Vector visitedPages = new Vector();
            try {
                numPages = WikiBase.getInstance().getSearchEngineInstance()
                        .getAllTopicNames(currentWiki).size();
            } catch (Exception e1) {
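                // assume a single page if the total cannot be determined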
                numPages = 1;
            }
            // get starting point
            String startTopic = en.getStringSetting(Environment.PROPERTY_DEFAULT_TOPIC);
            if (startTopic == null || startTopic.length() < 2) {
                startTopic = "StartingPoints";
            }
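            // the root topic is the only (and therefore last) entry on its level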
            List startingList = new ArrayList(1);
            startingList.add(LAST_IN_LIST);
            parsePages(currentWiki, startTopic, startingList, "1", sitemapLines,
                    visitedPages, endString);
            SitemapBean onewiki = new SitemapBean();
            onewiki.setName(currentWiki);
            onewiki.setPages(sitemapLines);
            vwikis.getVwiki().add(onewiki);
        }
        progress = PROGRESS_DONE;
    }

    /**
     * Update the progress percentage. Each virtual wiki contributes an equal
     * share of 100 / allWikiSize; within the current wiki that share grows
     * with pageCount relative to numPages. The result is capped at 99 until
     * run() sets PROGRESS_DONE.
     */
    private void setProgress() {
        if (numPages == 0) {
            numPages = 1;
        }
        double one = 100.0 / (double) allWikiSize;
        progress = Math.min((int) ((double) allWikiCount * one
                + (double) pageCount * one / (double) numPages), 99);
    }

    /**
     * We are done. Go to the result page.
     * @see vqwiki.servlets.LongLastingOperationServlet#dispatchDone(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
     */
    protected void dispatchDone(HttpServletRequest request,
            HttpServletResponse response) {
        request.setAttribute("virtualwikis", vwikis);
        dispatch("/jsp/sitemap.jsp", request, response);
    }

    /**
     * Parse the pages starting with the given topic. The results are stored
     * in the list sitemapLines. This function is called recursively, but the
     * list is filled in the correct order.
     *
     * @param currentWiki Name of the virtual wiki to refer to
     * @param topic Start with this page
     * @param levelsIn A list indicating the images to use to represent the levels above this page
     * @param group The group we are representing
     * @param sitemapLines A list of all lines, which results in the sitemap
     * @param visitedPages A vector of all pages which have already been visited
     * @param endString Beyond this text we do not search for links
     */
    private void parsePages(String currentWiki, String topic, List levelsIn,
            String group, List sitemapLines, Vector visitedPages, String endString) {
        try {
            WikiBase base = WikiBase.getInstance();
            String onepage = base.readCooked(currentWiki, topic);
            List result = new ArrayList();
            List levels = new ArrayList(levelsIn.size());
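            // Rewrite the inherited markers for this deeper row: MORE_TO_COME
            // becomes HORIZ_LINE and LAST_IN_LIST becomes NOTHING; only the
            // last (innermost) marker keeps its original value.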
            for (int i = 0; i < levelsIn.size(); i++) {
                if ((i + 1) < levelsIn.size()) {
                    if (MORE_TO_COME.equals((String) levelsIn.get(i))) {
                        levels.add(HORIZ_LINE);
                    } else if (LAST_IN_LIST.equals((String) levelsIn.get(i))) {
                        levels.add(NOTHING);
                    } else {
                        levels.add(levelsIn.get(i));
                    }
                } else {
                    levels.add(levelsIn.get(i));
                }
            }
            if (onepage != null) {
                // if we are at a page which was already visited, forget about its children
                //if (visitedPages.contains(topic))
                //    return;
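                // Scan the rendered (cooked) HTML for internal links of the
                // form href="Wiki?TopicName..."; query parameters after '&'
                // are stripped before the topic name is recorded.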
                String searchfor = "href=\"Wiki?";
                int iPos = onepage.indexOf(searchfor);
                int iEndPos;
                if (endString == null || endString.trim().length() == 0) {
                    iEndPos = Integer.MAX_VALUE;
                } else {
                    iEndPos = onepage.indexOf(endString);
                    if (iEndPos == -1) {
                        iEndPos = Integer.MAX_VALUE;
                    }
                }
                while (iPos > -1 && iPos < iEndPos) {
                    String link = onepage.substring(iPos + searchfor.length(),
                            onepage.indexOf('"', iPos + searchfor.length()));
                    if (link.indexOf('&') > -1) {
                        link = link.substring(0, link.indexOf('&'));
                    }
                    if (link.length() > 3
                            && !link.startsWith("topic=")
                            && !link.startsWith("action=")
                            && !visitedPages.contains(link)
                            && !PseudoTopicHandler.getInstance().isPseudoTopic(link)) {
                        result.add(link);
                        visitedPages.add(link);
                    }
                    iPos = onepage.indexOf(searchfor, iPos + searchfor.length());
                }
                // add a sitemap line
                SitemapLineBean slb = new SitemapLineBean();
                slb.setTopic(topic);
                slb.setLevels(new ArrayList(levels));
                slb.setGroup(group);
                slb.setHasChildren(result.size() > 0);
                sitemapLines.add(slb);
                pageCount++;
                setProgress();
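                // Recurse into every newly found child; the last child is
                // marked LAST_IN_LIST, all others MORE_TO_COME, and the marker
                // is removed again once the recursive call returns.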
                for (int i = 0; i < result.size(); i++) {
                    String link = (String) result.get(i);
                    String newGroup = group + "_" + String.valueOf(i);
                    boolean isLast = ((i + 1) == result.size());
                    if (isLast) {
                        levels.add(LAST_IN_LIST);
                    } else {
                        levels.add(MORE_TO_COME);
                    }
                    parsePages(currentWiki, link, levels, newGroup, sitemapLines,
                            visitedPages, endString);
                    levels.remove(levels.size() - 1);
                }
            }
        } catch (Exception e) {
            logger.fatal("Exception", e);
        }
    }
}

/*
 * Log:
 *
 * $Log$
 * Revision 1.7 2006/04/23 06:36:56 wrh2
 * Coding style updates (VQW-73).
 *
 * Revision 1.6 2003/10/05 05:07:32 garethc
 * fixes and admin file encoding option + merge with contributions
 *
 * Revision 1.5 2003/07/23 13:45:19 mrgadget4711
 * ADD: progress information
 *
 * Revision 1.4 2003/07/23 00:34:26 mrgadget4711
 * ADD: Long lasting operations
 *
 * Revision 1.3 2003/07/21 20:58:37 mrgadget4711
 * ADD: Dynamically open / close subtrees in IE (using DHTML)
 *
 * Revision 1.2 2003/07/21 09:19:39 mrgadget4711
 * Fixes
 *
 * Revision 1.1 2003/07/20 20:34:40 mrgadget4711
 * ADD: Sitemap
 *
 * Revision 1.1 2003/07/19 13:22:59 mrgadget4711
 * ADD: Statistic capabilities
 *
 * ------------END------------
 */