Source Code Cross Referenced for RDMSearchProvider.java in Portal » Open-Portal » com.sun.jspwiki.search

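RDMSearchProvider plugs JSPWiki's SearchProvider interface into Sun's Portal Search (RDM) server: page changes are queued and pushed to the server as SOIF records by a background thread, and queries are forwarded to the RDM server over HTTP. Below is a minimal usage sketch, not part of the original file: the class and method names of the sketch are hypothetical, the WikiEngine is assumed to be configured elsewhere, the property keys are the public constants defined in the listing, and the server URL is the example host quoted in the Search javadoc comment. In a real deployment these properties would presumably come from jspwiki.properties rather than being set programmatically.

import java.util.Collection;
import java.util.Properties;

import com.ecyrd.jspwiki.WikiEngine;
import com.sun.jspwiki.search.RDMSearchProvider;

/**
 * Illustrative sketch only - not part of the original file.  Wires up an
 * RDMSearchProvider by hand; the WikiEngine is assumed to be configured
 * elsewhere, and the server URL is the example host quoted in the Search
 * javadoc comment inside the listing.
 */
class RDMSearchProviderUsageSketch {

    static Collection searchExample(WikiEngine engine) throws Exception {
        Properties props = new Properties();

        // Required: RDM search server URL (jspwiki.searchServer); the provider
        // disables itself when this is missing.
        props.setProperty(RDMSearchProvider.PROP_RDMSERVER_URL,
                "http://portal.siroe.com:2222/mySearch/search");

        // Optional: search database (jspwiki.searchDatabase); defaults to "wiki".
        props.setProperty(RDMSearchProvider.PROP_WIKI_SEARCH_DATABASE, "wiki");

        RDMSearchProvider provider = new RDMSearchProvider();
        provider.initialize(engine, props);        // also disables search if no admin SSO token is available
        return provider.findPages("wiki syntax");  // SearchResult objects, or null when search is disabled
    }
}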


001:        /*
002:         * Copyright 2003 Sun Microsystems, Inc. All rights reserved.
003:         *
004:         * Redistribution and use in source and binary forms, with or without
005:         * modification, are permitted provided that the following conditions
006:         * are met:
007:         *
008:         * - Redistributions of source code must retain the above copyright
009:         *   notice, this list of conditions and the following disclaimer.
010:         *
011:         * - Redistribution in binary form must reproduce the above copyright
012:         *   notice, this list of conditions and the following disclaimer in
013:         *   the documentation and/or other materials provided with the
014:         *   distribution.
015:         *
016:         * Neither the name of Sun Microsystems, Inc. or the names of
017:         * contributors may be used to endorse or promote products derived
018:         * from this software without specific prior written permission.
019:         *
020:         * This software is provided "AS IS," without a warranty of any
021:         * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
022:         * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
023:         * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
024:         * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
025:         * SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
026:         * DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN
027:         * OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR
028:         * FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR
029:         * PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF
030:         * LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE SOFTWARE,
031:         * EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
032:         *
033:         * You acknowledge that Software is not designed, licensed or intended
034:         * for use in the design, construction, operation or maintenance of
035:         * any nuclear facility.
036:         */
037:
038:        package com.sun.jspwiki.search;
039:
040:        import java.io.File;
041:        import java.io.IOException;
042:        import java.io.InputStream;
043:        import java.io.InputStreamReader;
044:        import java.io.StringReader;
045:        import java.io.StringWriter;
046:        import java.io.ByteArrayOutputStream;
047:        import java.io.DataOutputStream;
048:        import java.util.*;
049:        import java.net.URLConnection;
050:        import java.net.URL;
051:        import java.net.MalformedURLException;
052:
053:        import org.apache.log4j.Logger;
054:
055:        import com.ecyrd.jspwiki.FileUtil;
056:        import com.ecyrd.jspwiki.NoRequiredPropertyException;
057:        import com.ecyrd.jspwiki.SearchResult;
058:        import com.ecyrd.jspwiki.TextUtil;
059:        import com.ecyrd.jspwiki.WikiEngine;
060:        import com.ecyrd.jspwiki.WikiContext;
061:        import com.ecyrd.jspwiki.WikiPage;
062:        import com.ecyrd.jspwiki.WikiProvider;
063:        import com.ecyrd.jspwiki.attachment.Attachment;
064:        import com.ecyrd.jspwiki.attachment.AttachmentManager;
065:        import com.ecyrd.jspwiki.providers.ProviderException;
066:        import com.ecyrd.jspwiki.providers.WikiPageProvider;
067:        import com.ecyrd.jspwiki.util.ClassUtil;
068:        import com.ecyrd.jspwiki.search.SearchProvider;
069:
070:        import com.sun.portal.search.demo.Search;
071:        import com.sun.portal.search.soif.SOIF;
072:        import com.sun.portal.search.soif.SOIFInputStream;
073:        import com.sun.portal.search.soif.SOIFOutputStream;
074:
075:        import com.sun.identity.security.*;
076:        import com.iplanet.sso.*;
077:        import com.iplanet.am.sdk.*;
078:
079:        //import com.sun.portal.community.Communities;
080:        //import com.sun.portal.community.Community;
081:        //import com.sun.portal.community.JDBCCommunities;
082:        //import com.sun.portal.community.BaseService;
083:        //import com.sun.portal.community.notification.NotificationService;
084:        //import com.sun.portal.community.notification.Notification;
085:
086:        public class RDMSearchProvider implements  SearchProvider {
087:            private static final Logger log = Logger
088:                    .getLogger(RDMSearchProvider.class);
089:
090:            private WikiEngine m_engine;
091:            private String m_rdmserver;
092:            private String m_searchDatabase;
093:            private boolean m_disabled = false;
094:
095:            static private final boolean UPDATE = true;
096:            static private final boolean REMOVE = false;
097:
098:            // RDM properties
099:            public static final String PROP_RDMSERVER_URL = "jspwiki.searchServer";
100:            public static final String PROP_WIKI_SEARCH_DATABASE = "jspwiki.searchDatabase";
101:            public static final String DEFAULT_WIKI_SEARCH_DATABASE = "wiki";
102:            public static final String WIKI_URL_PREFIX = "/wiki/Wiki.jsp";
103:
104:            private static final int RDM_MAX_HITS = 500;
105:            private static final int RDM_MAX_RESULTS = 20; // should be same as jspwiki search iterator limit
106:
107:            private Thread m_rdmUpdateThread = null;
108:            private Vector m_updates = new Vector(); // Vector because it is accessed from multiple threads.
109:
110:            private SSOToken m_adminSSOToken;
111:            private String m_adminSSOTokenID;
112:
113:            public void initialize(WikiEngine engine, Properties props)
114:                    throws NoRequiredPropertyException, IOException {
115:
116:                m_engine = engine;
117:                m_rdmserver = props.getProperty(PROP_RDMSERVER_URL);
118:                m_searchDatabase = props.getProperty(PROP_WIKI_SEARCH_DATABASE);
119:                m_adminSSOToken = engine.getUserManager().getAdminSSOToken();
120:                if (m_adminSSOToken != null)
121:                    m_adminSSOTokenID = m_adminSSOToken.getTokenID().toString();
122:
123:                if (m_rdmserver == null) {
124:                    log
125:                            .error("No RDM search server specified - disabling search.");
126:                    m_disabled = true;
127:                    return;
128:                }
129:
130:                if (m_adminSSOToken == null || m_adminSSOTokenID == null) {
131:                    log
132:                            .error("Failed to initialize admin SSO token ID - disabling search");
133:                    m_disabled = true;
134:                    return;
135:                }
136:
137:                if (m_searchDatabase == null) {
138:                    log
139:                            .warn("No RDM search database specified - using default: "
140:                                    + DEFAULT_WIKI_SEARCH_DATABASE);
141:                    m_searchDatabase = DEFAULT_WIKI_SEARCH_DATABASE;
142:                }
143:
144:                try {
145:                    // test the search server connection, start indexing thread if ok
146:                    Collection c = findPages("*", 0);
147:                    startRDMUpdateThread();
148:                } catch (Exception e) {
149:                    log.error("Problem with RDM Server - disabling search.", e);
150:                    m_disabled = true;
151:                }
152:
153:            }
154:
155:            public void pageRemoved(WikiPage page) {
156:                if (m_disabled)
157:                    return;
158:                //
159:                // remove page from portal search db
160:                try {
161:                    log
162:                            .debug("Removing page " + page.getName()
163:                                    + " from index");
164:                    // XXX supposed to be queuing this, not doing it inline...
165:                    Collection c = new HashSet();
166:                    Object[] pair = new Object[] { page, null };
167:                    c.add(pair);
168:                    doSearchIndex(c, REMOVE);
169:                } catch (IOException e) {
170:                    log.error("Unable to remove page '" + page.getName()
171:                            + "' from index", e);
172:                }
173:            }
174:
175:            public void reindexPage(WikiPage page) {
176:
177:                if (m_disabled)
178:                    return;
179:
180:                if (page != null) {
181:                    String text;
182:                    InputStream stream = null;
183:
184:                    // TODO: Consider whether this would be better done in the update thread itself.
185:
186:                    if (page instanceof  Attachment) {
187:                        text = getAttachmentContent((Attachment) page);
188:                        if (text == null)
189:                            stream = getAttachmentContentAsStream((Attachment) page);
190:                    } else {
191:                        // XXX need to clean html
192:                        // XXX seems that some changes to text are not seen here - cache issue?
193:                        text = m_engine.getPureText(page);
194:                    }
195:
196:                    if (text != null || stream != null) {
197:                        log.debug("Scheduling page " + page.getName()
198:                                + " for index update");
199:                        // Add work item to m_updates queue.
200:                        Object[] pair = new Object[2];
201:                        pair[0] = page;
202:                        if (text != null)
203:                            pair[1] = text;
204:                        else
205:                            pair[1] = stream;
206:                        m_updates.add(pair);
207:                    }
208:                    // else what? remove? empty rd?
209:                }
210:            }
211:
212:            public Collection findPages(String query) throws ProviderException {
213:                if (m_disabled) {
214:                    // XXX users see this exception msg on the results page (i18n) 
215:                    // - leave empty for now
216:                    // - the jsp should handle this with its own error message
217:                    //throw new ProviderException("search is currently disabled");
218:                    return null; // null means no results
219:                }
220:                return findPages(query, RDM_MAX_RESULTS);
221:            }
222:
223:            /**
224:             *  Performs a full RDM reindex, if necessary.
225:             *  @throws IOException
226:             */
227:            private void doFullRDMReindex() throws IOException {
228:                try {
229:                    // see if any wiki contents - c.size() is number of hits
230:                    Collection c = findPages("*", 1);
231:                    if (c == null || c.size() == 0) {
232:
233:                        //
234:                        //  No files? Reindex!
235:                        //
236:                        Date start = new Date();
237:
238:                        log.info("Starting RDM reindexing...");
239:
240:                        Collection allPages = m_engine.getPageManager()
241:                                .getAllPages();
242:
243:                        for (Iterator iterator = allPages.iterator(); iterator
244:                                .hasNext();) {
245:                            WikiPage page = (WikiPage) iterator.next();
246:                            reindexPage(page);
247:                        }
248:
249:                        Collection allAttachments = m_engine
250:                                .getAttachmentManager().getAllAttachments();
251:                        String lastAttName = "";
252:                        for (Iterator iterator = allAttachments.iterator(); iterator
253:                                .hasNext();) {
254:                            Attachment att = (Attachment) iterator.next();
255:                            String name = att.getName();
256:                            if (name.equals(lastAttName))
257:                                continue;
258:                            lastAttName = name;
259:                            reindexPage(att);
260:                        }
261:
262:                        Date end = new Date();
263:                        log.info("Full RDM index finished in "
264:                                + (end.getTime() - start.getTime())
265:                                + " milliseconds.");
266:                    } else {
267:                        log.info("Files found in index, not reindexing.");
268:                    }
269:                } catch (NoClassDefFoundError e) {
270:                    log
271:                            .info("RDM libraries do not exist - not using RDM search.");
272:                } catch (ProviderException e) {
273:                    log
274:                            .error(
275:                                    "Problem reading pages while creating RDM index (JSPWiki won't start.)",
276:                                    e);
277:                    throw new IllegalArgumentException(
278:                            "unable to create RDM index");
279:                } catch (Exception e) {
280:                    log.error("Unable to start RDM search", e);
281:                }
282:
283:            }
284:
285:            /**
286:             *  Fetches the attachment content from the repository.
287:             *  Content is flat text that can be used for indexing/searching or display
288:             */
289:            private String getAttachmentContent(String attachmentName,
290:                    int version) {
291:                AttachmentManager mgr = m_engine.getAttachmentManager();
292:
293:                try {
294:                    Attachment att = mgr.getAttachmentInfo(attachmentName,
295:                            version);
296:                    //FIXME: Find out why sometimes att is null
297:                    if (att != null) {
298:                        return getAttachmentContent(att);
299:                    }
300:                } catch (ProviderException e) {
301:                    log.error("Attachment cannot be loaded", e);
302:                }
303:                // Something was wrong, no result is returned.
304:                return null;
305:            }
306:
307:            /**
308:             * @param att Attachment to get content for. Filename extension is used to determine the type of the attachment.
309:             * @return String representing the content of the file.
310:             * FIXME This is a very simple implementation for some text-based attachments, mainly used for testing.
311:             * It should be replaced / moved to attachment search providers or some other pluggable way to search attachments.
312:             */
313:            private String getAttachmentContent(Attachment att) {
314:                AttachmentManager mgr = m_engine.getAttachmentManager();
315:                //FIXME: Add attachment plugin structure
316:
317:                String filename = att.getFileName();
318:
319:                if (filename.endsWith(".txt") || filename.endsWith(".xml")
320:                        || filename.endsWith(".ini")
321:                        || filename.endsWith(".html")) {
322:                    InputStream attStream;
323:
324:                    try {
325:                        attStream = mgr.getAttachmentStream(att);
326:
327:                        StringWriter sout = new StringWriter();
328:                        FileUtil.copyContents(new InputStreamReader(attStream),
329:                                sout);
330:
331:                        attStream.close();
332:                        sout.close();
333:
334:                        return sout.toString();
335:                    } catch (ProviderException e) {
336:                        log.error("Attachment cannot be loaded", e);
337:                        return null;
338:                    } catch (IOException e) {
339:                        log.error("Attachment cannot be loaded", e);
340:                        return null;
341:                    }
342:                }
343:
344:                return null;
345:            }
346:
347:            /**
348:             * @param att Attachment to get content for. Filename extension is used to determine the type of the attachment.
349:             * @return stream representing the content of the file.
350:             * @return InputStream representing the content of the file, or null if it cannot be loaded.
351:            private InputStream getAttachmentContentAsStream(Attachment att) {
352:                AttachmentManager mgr = m_engine.getAttachmentManager();
353:                //FIXME: Add attachment plugin structure
354:                String filename = att.getFileName();
355:                InputStream attStream = null;
356:                try {
357:                    attStream = mgr.getAttachmentStream(att);
358:                } catch (ProviderException e) {
359:                    log.error("Attachment cannot be loaded", e);
360:                } catch (IOException e) {
361:                    log.error("Attachment cannot be loaded", e);
362:                }
363:                return attStream;
364:            }
365:
366:            /**
367:             *  Starts the background indexing thread, which first waits a little while
368:             *  and then periodically processes the RDM "pages that need updating" queue.
369:             */
370:            private void startRDMUpdateThread() {
371:                m_rdmUpdateThread = new Thread(new Runnable() {
372:                    public void run() {
373:                        // FIXME: This is a kludge - JSPWiki should somehow report
374:                        //        that init phase is complete.
375:                        try {
376:                            Thread.sleep(60000L);
377:                        } catch (InterruptedException e) {
378:                        }
379:
380:                        // XXX need to do a single rdm batch here...
381:                        try {
382:                            doFullRDMReindex();
383:                            while (true) {
384:                                //doFullRDMReindex(); // XXX debugging
385:                                if (m_updates.size() > 0)
386:                                    doSearchIndex(m_updates, UPDATE);
387:                                try {
388:                                    Thread.sleep(3000); // index a batch every 3 seconds
389:                                } catch (InterruptedException e) {
390:                                }
391:                            }
392:                        } catch (Exception e) {
393:                            log
394:                                    .error(
395:                                            "Problem with RDM indexing - indexing shut down (no searching)",
396:                                            e);
397:                        }
398:                    }
399:                });
400:                m_rdmUpdateThread.start();
401:            }
402:
403:            //
404:            // Update is either UPDATE (update/add to index) or REMOVE (delete from index)
405:            //
406:            private void doSearchIndex(Collection pages, boolean update)
407:                    throws IOException {
408:                // XXX The pages collection contains pages from different wikis, we don't really
409:                // want to, but we will do a separate search server upload for each wiki - this is 
410:                // because of the way search virtual dbs currently work
411:
412:                Map wikiPageMap = new HashMap();
413:
414:                for (Iterator i = pages.iterator(); i.hasNext();) {
415:                    Object[] pair = (Object[]) i.next();
416:                    i.remove();
417:                    WikiPage page = (WikiPage) pair[0];
418:                    String wikiName = m_engine.getWikiName(page.getName());
419:                    if (wikiName == null)
420:                        wikiName = "";
421:                    List wikiPageList = (List) wikiPageMap.get(wikiName);
422:                    if (wikiPageList == null) {
423:                        wikiPageList = new ArrayList();
424:                    }
425:                    wikiPageList.add(pair);
426:                    wikiPageMap.put(wikiName, wikiPageList);
427:                }
428:
429:                // now we have the pages grouped by wiki name - index them one batch per wiki
430:
431:                int batchCount = 0;
432:                int pageCount = 0;
433:
434:                for (Iterator batchIt = wikiPageMap.entrySet().iterator(); batchIt
435:                        .hasNext();) {
436:
437:                    batchCount++;
438:                    Map.Entry batchEntry = (Map.Entry) batchIt.next();
439:                    String wikiName = (String) batchEntry.getKey();
440:                    Collection pageBatch = (Collection) batchEntry.getValue();
441:
442:                    URLConnection pc = null;
443:                    try {
444:                        pc = new URL(m_rdmserver).openConnection();
445:                    } catch (MalformedURLException mue) {
446:                        throw new IOException("Invalid URL: " + m_rdmserver
447:                                + " - " + mue);
448:                    }
449:
450:                    pc.setAllowUserInteraction(true);
451:                    pc.setUseCaches(false);
452:                    pc.setDoOutput(true);
453:                    pc.setDoInput(true);
454:
455:                    SOIFOutputStream sos = new SOIFOutputStream(pc
456:                            .getOutputStream());
457:                    pc.connect();
458:
459:                    String db = m_searchDatabase;
460:                    if (wikiName.length() > 0)
461:                        db += ".jdo__" + wikiName;
462:                    SOIF hdr = new SOIF("RDMHEADER", "-");
463:                    hdr.insert("rdm-type", "rd-submit-request");
464:                    hdr.insert("submit-database", db);
465:                    hdr.insert("rdm-access-token", m_adminSSOTokenID);
466:                    sos.write(hdr);
467:
468:                    SOIF req = new SOIF("Request", "-");
469:                    req.insert("submit-type", "nonpersistent");
470:                    if (update)
471:                        req.insert("submit-operation", "insert");
472:                    else
473:                        req.insert("submit-operation", "delete");
474:                    sos.write(req);
475:
476:                    for (Iterator i = pageBatch.iterator(); i.hasNext();) {
477:                        Object[] pair = (Object[]) i.next();
478:                        i.remove();
479:                        WikiPage page = (WikiPage) pair[0];
480:                        Object content = pair[1];
481:                        if (update)
482:                            log.debug("Updating RDM server for page '"
483:                                    + page.getName() + "'");
484:                        else
485:                            log.debug("Removing page '" + page.getName()
486:                                    + "' from RDM server");
487:                        SOIF pageRD = makePageRD(page, content, update);
488:                        sos.write(pageRD);
489:                        pageCount++;
490:                        //System.out.println(pageRD);
491:
492:                        // register notification with community
493:                        try {
494:                            ;
495:                            //registerNotification(page);
496:                        } catch (Exception e) {
497:                            e.printStackTrace();
498:                            log.error(
499:                                    "Registering community notification failed for page '"
500:                                            + page.getName() + "'", e);
501:                        }
502:                    }
503:                    sos.close();
504:
505:                    // read the search server response (ignored)
506:                    SOIFInputStream sis = new SOIFInputStream(pc
507:                            .getInputStream());
508:                    SOIF s;
509:                    while ((s = sis.readSOIF()) != null) {
510:                        ;
511:                    }
512:                    sis.close();
513:
514:                }
515:
516:                log.debug("Sent " + pageCount + " pages in " + batchCount
517:                        + " batches for " + (update ? "update" : "deletion")
518:                        + " to search server.");
519:
520:            }
521:
522:            private SOIF makePageRD(WikiPage page, Object content,
523:                    boolean update) {
524:                String documentURL = getWikiURL(page);
525:
526:                SOIF rd = new SOIF("DOCUMENT", documentURL);
527:
528:                if (!update) {
529:                    // only needs the url when deleting an RD
530:                    return rd;
531:                }
532:
533:                rd.insert("title", page.getName());
534:
535:                if (page.getAuthor() != null && page.getAuthor().length() > 0) {
536:                    rd.insert("author", page.getAuthor());
537:                }
538:
539:                if (page.getLastModified() != null) {
540:                    rd.insert("last-modified", page.getLastModified()
541:                            .toString());
542:                }
543:
544:                if (content instanceof  String) {
545:                    //rd.insert("partial-text", getPageText(page.getName(), WikiPageProvider.LATEST_VERSION));
546:                    rd.insert("partial-text", (String) content);
547:                } else {
548:
549:                    /*
550:                    if (urlPattern != null) {
551:                    Object[] args = {Encoder.urlEncode(file.getName(), "UTF-8"), Encoder.urlEncode(file.getPath(), "UTF-8")};
552:                    String downloadUrlString= java.text.MessageFormat.format(urlPattern, args);
553:                    rd.insert("rd-display-url", downloadUrlString);
554:                    }
555:                     */
556:                    try {
557:                        InputStream in = (InputStream) content;
558:                        ByteArrayOutputStream bos = new ByteArrayOutputStream(
559:                                4000);
560:                        DataOutputStream dos = new DataOutputStream(bos);
561:                        byte[] buf = new byte[4000];
562:                        int n;
563:                        while ((n = in.read(buf)) > 0) {
564:                            dos.write(buf, 0, n);
565:                        }
566:                        in.close();
567:                        dos.close();
568:                        rd.insert("rd-file-binary", bos.toByteArray());
569:                        rd.insert("rd-file-name", page.getName());
570:                    } catch (java.io.IOException ioe) {
571:                        log
572:                                .error(
573:                                        "Failed to copy attachment content as stream for page",
574:                                        ioe);
575:                    }
576:                }
577:
578:                //rd.insert("classification", "Communities:" + page.getName());
579:
580:                // Now add the names of the attachments of this page
581:                try {
582:                    Collection attachments = m_engine.getAttachmentManager()
583:                            .listAttachments(page);
584:                    int atnum = 0;
585:                    for (Iterator it = attachments.iterator(); it.hasNext();) {
586:                        Attachment att = (Attachment) it.next();
587:                        rd.insert("attachment", att.getName(), atnum++);
588:                    }
589:                } catch (ProviderException e) {
590:                    // Unable to read attachments
591:                    log.error("Failed to get attachments for page", e);
592:                }
593:
594:                return rd;
595:            }
596:
597:            /*
598:            private void registerNotification(WikiPage page) throws Exception
599:            {
600:
601:            Communities ctys = new JDBCCommunities();
602:            //
603:            // TODO: this is stupid.  we need to either have a method
604:            //       in community which can look up a community based on its name
605:            //       or have community ID passed in from the wiki portlet
606:            Community cty = null;
607:            for (Iterator i=ctys.getAll().iterator(); i.hasNext(); ) {
608:                Community c = (Community)i.next();
609:                Map attrs = c.getAttributes(BaseService.NAME);
610:                String ctyName = (String)attrs.get(BaseService.TITLE_KEY);
611:                if (ctyName.equals(page.getName())) {
612:            	cty = c;
613:            	break;
614:                }
615:            }
616:            if (cty == null) {
617:                throw new Exception("No suitable community found that corresponds to wiki page=" + page.getName());
618:            }
619:
620:            NotificationService ns = new NotificationService(cty);
621:            String subject = page.getName() + " Wiki has been updated.";
622:
623:            System.out.println("Registering notification, subject=" + subject);
624:
625:            Map attrs = cty.getAttributes(BaseService.NAME);
626:            String ctyTabContainerName = (String)attrs.get(BaseService.TAB_CONTAINER_NAME_KEY);
627:
628:            String wikiURL = "/wiki/Wiki.jsp?page=" + page.getName();
629:
630:            String body = page.getName() + " Wiki: " +
631:                "[<A HREF=\"" + wikiURL + "\" " +
632:                "TARGET=\"" + page.getName() + " Wiki" + "\" " +
633:                "onClick=\"javascript: var wikiWin=window.open('" + 
634:                wikiURL + "', '" + page.getName() + " Wiki" + "', '" +
635:                "hotkeys=no,status=no,resizable=yes,scrollbars=yes,toolbar=yes');" +
636:                "wikiWin.focus();return false;\"" +
637:                ">View</A>]";
638:
639:            Notification n = new Notification(subject, body);
640:            ns.add(n);    
641:            }
642:             */
643:
644:            private String getWikiURL(WikiPage page) {
645:                //String wikiURL = WIKI_URL_PREFIX + "?page=" + page.getName() + "&version=" + page.getVersion();
646:                String wikiURL = WIKI_URL_PREFIX + "?page=" + page.getName();
647:                return wikiURL;
648:            }
649:
650:            /**
651:             *  Queries the RDM search server, returning at most numresults hits as SearchResult objects.
652:             */
653:            public Collection findPages(String query, int numresults)
654:                    throws ProviderException {
655:                ArrayList list = null; // this indicates no results (XXX)
656:                String searchdb = m_searchDatabase;
657:
658:                // If we're in a subwiki, search that wiki only
659:                String virtualdb = WikiContext.getWikiName();
660:                if (virtualdb != null && virtualdb.length() > 0)
661:                    searchdb += ".jdo__" + virtualdb; // XXX Need to avoid this jdo nonsense
662:
663:                String userSSOTokenID = null;
664:                try {
665:                    SSOToken ssoToken = SSOTokenManager.getInstance()
666:                            .createSSOToken(
667:                                    WikiContext.getHttpRequestForThread());
668:                    userSSOTokenID = ssoToken.getTokenID().toString();
669:                } catch (Exception ignored) {
670:                    // invalid token, expired session, etc - search will proceed with no credentials
671:                }
672:
673:                /* The Search class encapsulates the search.
674:                 ** Its parameters are:
675:                 **  1) the search string
676:                 **  2) the attributes you want returned, comma delimited
677:                 **  3) sort order, comma delimited, - descending, + ascending
678:                 **  4) hit number of first result
679:                 **  5) number of results to return
680:                 **  6) query language, eg search, taxonomy-basic, schema-basic, etc
681:                 **  7) database to search
682:                 **  8) The RDM server URL, eg, http://portal.siroe.com:2222/mySearch/search
683:                 **  9) Access token (null for anonymous access, or valid
684:                 **     Sun ONE Identity Server session id)
685:                 */
686:                Search search = new Search(query,
687:                        // XXX "score,url,title,description",
688:                        "score", "-score", 1, numresults, "search", searchdb,
689:                        m_rdmserver, userSSOTokenID);
690:
691:                try {
692:                    boolean catchExceptions = false;
693:                    search.doQuery(catchExceptions);
694:
695:                    // Check the result count. -1 indicates an error. XXX
696:                    if ((numresults == -1 || numresults > 0)
697:                            && search.getResultCount() > 0) {
698:
699:                        // XXX jspwiki search interface does not have an explicit getHitsCount() method,
700:                        // instead it reports the size of the collection as the number of hits.
701:                        // So as a kludge we allocate a bigger array (up to a limit) and null fill it.
702:                        // The searchresult iterator fails if it hits a null (XXX), so the caller
703:                        // must not iterate past numresults (see the matching comment further down).
704:                        int arraysize = Math.min(search.getHitCount(),
705:                                RDM_MAX_HITS);
706:
707:                        list = new ArrayList(arraysize);
708:
709:                        SOIFInputStream resultStream = search.getResultStream();
710:                        SOIF soif;
711:
712:                        int count = 0;
713:                        for (soif = resultStream.readSOIF(); soif != null; soif = resultStream
714:                                .readSOIF()) {
715:                            String u = soif.getURL();
716:                            String pageName = u
717:                                    .substring(u.indexOf("page=") + 5); // XXX brittle
718:
719:                            WikiPage page = m_engine.getPage(pageName,
720:                                    WikiPageProvider.LATEST_VERSION);
721:
722:                            if (page != null) {
723:                                if (page instanceof  Attachment) {
724:                                    // Currently attachments don't look nice on the search-results page
725:                                    // When the search-results are cleaned up this can be enabled again.
726:                                }
727:                                int score = Integer.parseInt(soif
728:                                        .getValue("score"));
729:                                SearchResult result = new SearchResultImpl(
730:                                        page, score);
731:                                list.add(result);
732:                                count++;
733:                            } else {
734:                                log
735:                                        .error("Search found a result page '"
736:                                                + pageName
737:                                                + "' that could not be loaded, removing from index");
738:                                pageRemoved(new WikiPage(pageName));
739:                            }
740:                        }
741:                        // null fill the result array 
742:                        // XXX jspwiki search interface should have an explicit getHitsCount() method...
743:                        // so as a kludge we allocate a bigger array (up to a limit) whose size is the hitcount.
744:                        // The searchresult iterator fails if it hits a null (XXX), so it should not 
745:                        // iterate past numresults (current search results page shows max 20 results)
746:                        for (int i = count; i < arraysize; ++i) {
747:                            list.add(null);
748:                        }
749:                    }
750:
751:                } catch (Exception ex) {
752:                    log.info("Exception during RDM search", ex);
753:                    //throw new ProviderException("Error during search. Please report to administrator"); // XXX i18n
754:                    throw new ProviderException(null); // XXX i18n
755:                }
756:
757:                return list;
758:            }
759:
760:            public String getProviderInfo() {
761:                return "RDMSearchProvider";
762:            }
763:
764:            // FIXME: This class is dumb; needs to have a better implementation
765:            private class SearchResultImpl implements  SearchResult {
766:                private WikiPage m_page;
767:                private int m_score;
768:
769:                public SearchResultImpl(WikiPage page, int score) {
770:                    m_page = page;
771:                    m_score = score;
772:                }
773:
774:                public WikiPage getPage() {
775:                    return m_page;
776:                }
777:
778:                /* (non-Javadoc)
779:                 * @see com.ecyrd.jspwiki.SearchResult#getScore()
780:                 */
781:                public int getScore() {
782:                    return m_score;
783:                }
784:
785:            }
786:
787:        }
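
Appended usage note, not part of the original file: as the comments in findPages(String, int) explain, the returned Collection is null-padded up to the reported hit count, so a caller must stop at the first null entry (or after numresults entries). A minimal sketch of that, with a hypothetical helper class and an already-initialized provider:

import java.util.Collection;
import java.util.Iterator;

import com.ecyrd.jspwiki.SearchResult;
import com.ecyrd.jspwiki.providers.ProviderException;
import com.sun.jspwiki.search.RDMSearchProvider;

/**
 * Illustrative sketch only - not part of the original file.  Shows how a
 * caller might walk the Collection returned by findPages(query, numresults):
 * the list is null-padded up to the reported hit count (capped at
 * RDM_MAX_HITS), so iteration stops at the first null entry.
 */
class RDMSearchResultSketch {

    static void printResults(RDMSearchProvider provider) throws ProviderException {
        Collection hits = provider.findPages("wiki syntax", 20);
        if (hits == null) {
            return;                     // search disabled, or no hits at all
        }
        for (Iterator it = hits.iterator(); it.hasNext();) {
            Object o = it.next();
            if (o == null) {
                break;                  // null padding reached - no more real results
            }
            SearchResult result = (SearchResult) o;
            System.out.println(result.getPage().getName()
                    + " (score " + result.getScore() + ")");
        }
    }
}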