/*
 * Copyright 2001 Sun Microsystems, Inc. All rights reserved.
 * PROPRIETARY/CONFIDENTIAL. Use of this product is subject to license terms.
 */

package com.sun.portal.search.db;

import com.sun.portal.search.rdm.*;
import com.sun.portal.search.soif.*;
import com.sun.portal.search.util.*;
import com.sun.portal.log.common.PortalLogger;

import com.sun.kt.search.*;

import java.util.*;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.io.*;

/**
 * Search engine interface implementation using the Sun Labs Nova search engine.
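 *
 * <p>A minimal usage sketch (hypothetical paths and query; assumes a valid
 * SToken <code>st</code> and a SOIF <code>soif</code> to index):
 * <pre>
 *   NovaDb db = new NovaDb();
 *   db.open(st, "/var/opt/searchserver/db", "default", RDMDb.WRCREAT, 0644);
 *   db.store(st, soif, null, 0, null);
 *   db.indexBatch(st);   // flush the pending batch
 *   RDMResultSet rs = db.search(st, "URL = \"http://host/doc.html\"",
 *           10, null, null, null);
 *   db.close(st);
 * </pre>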
 */
public class NovaDb implements RDMDb {

    // Nova search engine.
    SearchEngine engine;

    // An index/query configuration set of properties.
    Properties indexProps;

    // Whether we'll save the description and partial text.
    static boolean saveData = false;

    /**
     * Determines whether we will save data directly into Nova, or use BDB.
     */
    public static void setSaveData(boolean sD) {
        saveData = sD;
    }

    // Query parameters
    Semaphore querySemaphore;
    int queryThreads = 6;

    // Schema
    static Map schemaAttrMap = new HashMap(); // map of objtype -> schema attr array XXX messy

    // SJG: Added the index config properties, as this is a good place to
    // configure the field types and multipliers.
    protected void load_schema_index_attrs(String objectType,
            Properties config) {

        SageAttr[] av = null;
        RDMSchema schema = RDMSchema.getSchema(objectType.toLowerCase());

        if (schema == null) {
            // log - No schema for object type: {0} - indexing will be skipped
            SearchLogger.getLogger().log(Level.WARNING,
                    "PSSH_CSPSB0006", objectType);
            return;
        }

        String[] attrs = schema.schema_index_attrs();
        String type, multiplier;
        int i = 0, n;
        boolean did_url = false;
        List multFields = new ArrayList();
        List multipliers = new ArrayList();

        boolean taxonomy = schema.getName().equalsIgnoreCase("CLASSIFICATION");

        n = attrs.length;

        SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0007",
                Integer.toString(n));

        //av = new SageAttr[n + 1 + 1];
        av = new SageAttr[n];

        // Load predefined indexed fields from schema
        for (i = 0; i < n; i++) {
            av[i] = new SageAttr();
            av[i].name = attrs[i];
            String lcname = av[i].name.toLowerCase();

            // Set the data type
            if ((type = schema.schema_attr_type(av[i].name)) == null)
                type = "string";
            if (type.equalsIgnoreCase("string")
                    || type.equalsIgnoreCase("blob"))
                av[i].is_string = true;
            else if (type.equalsIgnoreCase("int")
                    || type.equalsIgnoreCase("serial"))
                av[i].is_number = true;
            else if (type.equalsIgnoreCase("date"))
                av[i].is_date = true;
            if (av[i].name.equalsIgnoreCase("url"))
                did_url = true;

            // Set up the field attributes in the indexing configuration
            // for the search engine.
            if (lcname.equals("id") && taxonomy) {
                config.setProperty(lcname + ".type", "String");
                config.setProperty(lcname + ".indexed", "false");
                config.setProperty(lcname + ".tokenized", "false");
            } else if (lcname.equals("url")) {
                config.setProperty(lcname + ".type", "String");
                config.setProperty(lcname + ".indexed", "true");
                config.setProperty(lcname + ".tokenized", "true");
            } else if (lcname.equals("readacl")) {
                // want readacl to be indexed, but not tokenized
                // XXX should be in schema
                config.setProperty(lcname + ".type", "String");
                config.setProperty(lcname + ".indexed", "true");
                config.setProperty(lcname + ".tokenized", "false");
            } else if (lcname.equals("virtual-db")) {
                config.setProperty(lcname + ".type", "String");
                config.setProperty(lcname + ".indexed", "false");
                config.setProperty(lcname + ".tokenized", "false");
            } else if (lcname.equals("description")) {
                // Only save description as a field if we're saving the data
                if (saveData) {
                    config.setProperty(lcname + ".type", "String");
                }
                config.setProperty(lcname + ".indexed", "true");
                config.setProperty(lcname + ".tokenized", "true");
            } else if (lcname.equals("partial-text")) {
                // Only create/save partial-text as a field if we're saving the data
                if (saveData) {
                    config.setProperty(lcname + ".type", "String");
                    config.setProperty(lcname + ".indexed", "true");
                    config.setProperty(lcname + ".tokenized", "true");
                }
            } else {
                if (av[i].is_date) {
                    config.setProperty(lcname + ".type", "Date");
                    config.setProperty(lcname + ".indexed", "false");
                    config.setProperty(lcname + ".tokenized", "false");
                } else if (av[i].is_number) {
                    config.setProperty(lcname + ".type", "Integer");
                    config.setProperty(lcname + ".indexed", "false");
                    config.setProperty(lcname + ".tokenized", "false");
                } else {
                    config.setProperty(lcname + ".type", "String");
                    config.setProperty(lcname + ".indexed", "true");
                    config.setProperty(lcname + ".tokenized", "true");
                }
            }

            // The score multiplier
            if ((multiplier = schema.getValue(av[i].name,
                    RDM.A_RDM_SCORE_MULTIPLIER)) != null) {
                try {
                    Float m = Float.valueOf(multiplier);
                    config.setProperty(lcname + ".mult", m.toString());
                    multFields.add(av[i].name);
                    multipliers.add(m);
                } catch (NumberFormatException e) {
                    String[] params = new String[2];
                    params[0] = av[i].name;
                    params[1] = multiplier;
                    // log - Ignoring illegal score multiplier: {0} : {1}
                    SearchLogger.getLogger().log(Level.WARNING,
                            "PSSH_CSPSB0008", params);
                }
            }
        }

        schemaAttrMap.put(objectType.toLowerCase(), av);
    }

    /**
     * Turns a SOIF into a map, suitable for indexing or
     * highlighting.
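     *
     * <p>A minimal sketch of the mapping (attribute names hypothetical;
     * only attributes present in the loaded schema are copied over):
     * <pre>
     *   SOIF s = new SOIF("DOCUMENT", "http://host/doc.html");
     *   s.insert("title", "Example");
     *   Map m = NovaDb.soifToIndexableMap(s);
     *   // m = { url=http://host/doc.html, title=Example }
     *   // multi-valued attributes are stored as String[] values
     * </pre>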
     */
    public static Map soifToIndexableMap(SOIF s) {
        Map ret = new SequencedMap();

        SageAttr[] attrs = (SageAttr[]) schemaAttrMap.get(
                s.getSchemaName().toLowerCase());

        Logger debugLogger1 = SearchLogger.getLogger();

        if (attrs == null) {
            if (debugLogger1.isLoggable(Level.INFO)
                    || !s.getSchemaName().equalsIgnoreCase("TAXONOMY"))
                // log - Unknown schema type - object not indexed: @{0} {1}
                debugLogger1.log(Level.WARNING, "PSSH_CSPSB0009",
                        new String[] { s.getSchemaName(), s.getURL() });
            return ret;
        }

        boolean taxonomy = s.getSchemaName().equalsIgnoreCase("CLASSIFICATION");

        if (taxonomy) {
            // Save the Id but don't index it (only index the leaf category
            // - see below) (XXX should be controlled by the schema)
            debugLogger1.log(Level.FINEST, "PSSH_CSPSB0010", s.getValue("Id"));
            ret.put("id", s.getValue("Id"));
        } else {
            debugLogger1.log(Level.FINEST, "PSSH_CSPSB0011", s.getURL());
            ret.put("url", s.getURL());
        }

        for (int i = 0; i < attrs.length; i++) {
            String fldname = attrs[i].name.toLowerCase();

            // Ignore fields that match the doc key to avoid creating a MV field
            // XXX Index anyway, but don't save?
            if ((taxonomy && fldname.equalsIgnoreCase("Id"))
                    || fldname.equalsIgnoreCase("URL"))
                continue;

            AVPair avp = s.getAVPair(attrs[i].name);
            if (avp == null) {
                if (taxonomy
                        && attrs[i].name.equalsIgnoreCase("Category")) {
                    // Handle Category pseudo field - leaf component of Id
                    // XXX this should be a first class taxonomy field
                    String id = s.getValue(RDM.A_RDM_ID);
                    String cat = id;
                    int z = id.lastIndexOf(':');
                    if (z != -1)
                        cat = id.substring(z + 1);
                    avp = new AVPair("Category", cat);
                } else {
                    debugLogger1.log(Level.FINEST, "PSSH_CSPSB0012",
                            new String[] { fldname, s.getURL() });
                    continue;
                }
            }

            String[] val = avp.getStringValues();

            if (val.length == 0) {
                continue;
            }

            if (debugLogger1.isLoggable(Level.FINEST)) {
                String[] params = new String[2];
                params[0] = fldname;
                params[1] = (val[0].length() <= 50) ? val[0]
                        : (val[0].substring(0, 50) + "...");
                // log - indexing field: {0} : {1} ...
                debugLogger1.log(Level.FINEST, "PSSH_CSPSB0013", params);
            }

            if (val.length == 1) {
                ret.put(fldname, val[0]);
            } else {
                ret.put(fldname, val);
            }
        }

        // Now index the body field(s)
        // XXX should be controlled by schema
        // XXX what about description and other non-indexable fields?
        String desc = s.getValue("description");
        String body = s.getValue("partial-text");

        if (desc != null) {
            // If description was not derived from the body, index it.
            // Use a field, but don't save the data. The field helps in highlighting
            // and allows <contains> for RDs which have a description which
            // wasn't derived from the body. XXX There's no way to allow contains
            // for RDs with body-derived descriptions without adversely affecting
            // the scores for terms in the derived description.
            int matchLen = Math.max(0, desc.length() - 3); // Allow for legacy "..."
            if (body == null
                    || !desc.regionMatches(0, body, 0, matchLen)) {
                if (debugLogger1.isLoggable(Level.FINEST)) {
                    String param;
                    if (desc.length() <= 50)
                        param = desc;
                    else
                        param = desc.substring(0, 50);
                    debugLogger1.log(Level.FINEST, "PSSH_CSPSB0014", param);
                }
                ret.put("description", desc);
            }
        }

        if (body != null) {
            if (debugLogger1.isLoggable(Level.FINEST)) {
                String param;
                if (body.length() <= 50)
                    param = body;
                else
                    param = body.substring(0, 50);
                debugLogger1.log(Level.FINEST, "PSSH_CSPSB0015", param);
            }
            ret.put("partial-text", body);
        }

        return ret;
    }

    /**
     * Gets the document key from a SOIF. For taxonomy documents, this is
     * the ID, while for normal documents, it is the URL.
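     *
     * <p>E.g. (hypothetical values): a CLASSIFICATION SOIF with Id
     * "ROOT:News:Sports" keys on "ROOT:News:Sports"; a DOCUMENT SOIF
     * keys on its URL, e.g. "http://host/doc.html".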
     */
    public static String getSOIFKey(SOIF s) {
        return s.getSchemaName().equalsIgnoreCase("CLASSIFICATION")
                ? s.getValue("Id") : s.getURL();
    }

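    /**
     * Flushes the current batch of SOIFs through the Nova indexing
     * pipeline, merges the index, and logs the indexing rate. Called
     * from store() when the batch ceiling is reached, and from close()
     * to flush any remainder.
     */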
    public void indexBatch(SToken st) throws RDMException {

        // XXX keep a count and a timer for batch flushing

        synchronized (curBatch) {

            if (curBatch.size() == 0)
                return;

            // Trigger indexing
            long start = System.currentTimeMillis();

            SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0016");
            int nrds = 0;

            // Gets a pipeline that we can stream the SOIFs to for indexing.
            Pipeline pipe = engine.getIndexingPipeline();

            for (Iterator i = curBatch.iterator(); i.hasNext();) {
                SOIF s = (SOIF) i.next();

                Map im;
                try {
                    im = soifToIndexableMap(s);
                } catch (Exception e) {
                    // Failed to handle this RD - try to continue processing RDs
                    // XXX how to return error status?
                    // log - Failed to index: {0}
                    SearchLogger.getLogger().log(Level.WARNING,
                            "PSSH_CSPSB0017", s.getURL());
                    continue;
                }
                try {
                    pipe.index(getSOIFKey(s), im);
                    nrds++;
                } catch (SearchEngineException se) {
                    // D'oh! Flush what we managed to index and return the pipeline.
                    pipe.flush();
                    engine.returnIndexingPipeline(pipe);
                    throw new RDMException("Error indexing SOIF", se);
                }
            }
            pipe.flush();
            engine.returnIndexingPipeline(pipe);
            engine.merge();
            curBatch.clear();

            long finish = System.currentTimeMillis();
            long elapsed = Math.max(1, finish - start); // avoid divide by zero
            long diff = elapsed / 1000;
            int nrds1 = (int) (nrds * 3600.0f * 1000f / elapsed);
            // log - Indexed {0} RDs in {1} seconds ({2} per hour)
            SearchLogger.getLogger().log(
                    Level.INFO,
                    "PSSH_CSPSB0018",
                    new String[] { Integer.toString(nrds),
                            Long.toString(diff), Integer.toString(nrds1) });
        }
    }

    List curBatch = new ArrayList();
    // XXX batch size hard coded, enforced as ceiling - Nova does its own batching
    static int maxBatchSize = 50000;

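    /**
     * Queues a SOIF for indexing. The pending batch is flushed via
     * indexBatch() once it reaches maxBatchSize entries.
     */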
    public void store(SToken st, SOIF s, Set view, int flags,
            RDMTransaction t) throws RDMException {

        // XXX how to handle view... (need to build a new SOIF?)
        synchronized (curBatch) {
            curBatch.add(s);
            // XXX semaphore here?
            if (curBatch.size() >= maxBatchSize) {
                // XXX keep a count and a timer for batch flushing
                indexBatch(st);
            }
        }
    }

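    /**
     * Deletes a single RD from the index, keyed by its URL (or Id for
     * classifications - see getSOIFKey()).
     */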
    public void delete(SToken st, SOIF s, Set view, int flags,
            RDMTransaction t) throws RDMException {
        SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0019",
                s.getURL());
        engine.delete(getSOIFKey(s));
    }

    public void update(SToken st, SOIF insoif, Set view, int flags,
            RDMTransaction t) throws RDMException {
        throw new RDMException("NovaDb: update() not implemented");
    }

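    /**
     * Runs a query against the index. The query is first qualified by
     * the RDMSecurityManager, and concurrency is limited to
     * queryThreads simultaneous searches via querySemaphore.
     */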
    public RDMResultSet search(SToken st, String qry, int numHits,
            Set view, String sortOrder, RDMTransaction t)
            throws RDMException {

        // XXX move all the args to a query/session object?

        RDMResultSet resultSet = null;

        try {
            querySemaphore.acquire();

            String query = RDMSecurityManager.getInstance()
                    .qualify_Nova_Query(st, qry);
            List filters = null; // XXX

            ResultSet res = engine.search(query, sortOrder, filters);
            SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0020",
                    Long.toString(res.getQueryTime()));

            resultSet = new NovaResultSet(st, this, query, res,
                    numHits, engine.getNDocs(), t);

        } catch (Exception e) {
            // log - Error during search
            SearchLogger.getLogger().log(Level.WARNING,
                    "PSSH_CSPSB0021", e);
            throw new RDMException("Error during search");
        } finally {
            querySemaphore.release();
        }

        return resultSet;
    }

    /**
     * Retrieves an RD from the database.
     */
    public SOIF fetch(SToken st, String url, int flags, RDMTransaction t)
            throws RDMException {
        return fetch(st, url, /*view=*/null, flags, t);
    }

    /**
     * Retrieves an RD from the database, filtered by view.
     */
    public SOIF fetch(SToken st, String url, Set view, int flags,
            RDMTransaction t) throws RDMException {

        // XXX This is not a real db yet - no way to recover a complete SOIF
        if (true)
            throw new RDMException("not implemented");

        NovaResultSet rs = (NovaResultSet) search(st, "URL = "
                + Encoder.quotedEscape(url, true), 1, null, null, t);
        if (rs.getHitCount() != 1) {
            // log - {0} documents found for url: {1}
            SearchLogger.getLogger().log(Level.WARNING, "PSSH_CSPSB0022",
                    new String[] { Long.toString(rs.getHitCount()), url });
            return null;
        }
        Result hit = rs.getHit(0);
        String urlx = (String) hit.getField("url");
        SOIF res = new SOIF("DOCUMENT", urlx);
        // XXX not handling MV attrs correctly
        Iterator i;
        if (view != null)
            i = view.iterator();
        else
            i = hit.getFieldIterator(); // XXX this iterator returns default value fields (which we don't really want)

        while (i.hasNext()) {
            String att;
            Object val;
            if (view != null) {
                att = (String) i.next();
                val = hit.getField(att);
            } else {
                Map.Entry e = (Map.Entry) i.next();
                att = (String) e.getKey();
                if (att.equalsIgnoreCase("URL"))
                    continue; // don't add url as a field XXX what if present in original? - highlighting trouble
                val = e.getValue();
            }
            if (val == null)
                continue;
            if (val instanceof String) {
                // XXX temporary hack for MV fields - inefficient - move to Nova
                String s = (String) val;
                if (s.indexOf('\0') != -1) {
                    int j = 0;
                    StringTokenizer tok = new StringTokenizer(s, "\0");
                    while (tok.hasMoreTokens())
                        res.insert(att, tok.nextToken(), j++);
                } else
                    res.insert(att, val.toString());
            } else if (val instanceof Date)
                res.insert(att, val.toString()); // XXX which date format?
            else if (val instanceof Integer)
                res.insert(att, val.toString());
            else
                // log - unknown field type: {0} : {1}
                SearchLogger.getLogger().log(Level.WARNING,
                        "PSSH_CSPSB0023",
                        new Object[] { url, val.getClass().getName() });
        }
        return res;
    }

    /**
     * Delete a SOIF stream. Should do automatic batching if appropriate.
     */
    public void delete(SToken st, SOIFInputStream ss, Set view,
            int flags, RDMTransaction t) throws RDMException {
        try {
            SOIF s;
            while ((s = ss.readSOIF()) != null) {
                delete(st, s, view, flags, t);
            }
        } catch (Exception e) {
            throw new RDMException(e);
        }
    }

    public int count(SToken st, RDMTransaction t) throws RDMException {
        return engine.getNDocs();
    }

    /**
     * Opens a database.
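     *
     * <p>A minimal call sketch (hypothetical paths):
     * <pre>
     *   db.open(st, "/var/opt/searchserver/db", "default", RDMDb.WRCREAT, 0644);
     * </pre>
     *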
     * @param st
     * @param rootdir db home dir
     * @param dbname name of database from root.db (e.g., default)
     * @param rw RDMDb.WRITER, RDMDb.WRCREAT, or RDMDb.READER
     * @param mode Unix mode
     * @throws RDMException
     */
    public void open(SToken st, String rootdir, String dbname, int rw,
            int mode) throws RDMException {

        try {

            // Rather than doing things manually, we'll set the properties
            // used for the search engine.
            Properties indexConfig = new Properties();

            // log - open({0} , {1})
            SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0024",
                    new Object[] { rootdir, dbname });

            // The index directory.
            indexConfig.setProperty("indexDir", rootdir
                    + File.separator + dbname);

            String p;

            // postings type
            if ((p = SearchConfig.getValue(SearchConfig.INDEX_TYPE)) != null) {
                indexConfig.setProperty("postingsType", p);
            }

            // dictionary type
            if ((p = SearchConfig.getValue(SearchConfig.DICTIONARY_TYPE)) != null) {
                indexConfig.setProperty("dictionaryType", p);
            }

            // search time limit
            if ((p = SearchConfig.getValue(SearchConfig.LOOKUP_LIMIT)) != null) {
                int i = Integer.parseInt(p);
                if (i > 0)
                    indexConfig.setProperty("proxLimit", String.valueOf(i));
            } else {
                indexConfig.setProperty("proxLimit", "-1");
            }

            // index threads
            if ((p = SearchConfig.getValue(SearchConfig.INDEX_THREADS)) != null) {
                int i = Integer.parseInt(p);
                if (i > 0)
                    indexConfig.setProperty("numPipelines", String.valueOf(i));
            } else {
                indexConfig.setProperty("numPipelines", "1");
            }

            // indexing pipeline queue length.
            indexConfig.setProperty("queueLength", "32");

            // index partition max size
            if ((p = SearchConfig.getValue(SearchConfig.INDEX_PARTSIZE)) != null) {
                int i = Integer.parseInt(p);
                if (i > 0)
                    indexConfig.setProperty("inMemSize", String.valueOf(i));
            } else {
                indexConfig.setProperty("inMemSize", "64");
            }

            // index merge rate
            if ((p = SearchConfig.getValue(SearchConfig.INDEX_MERGERATE)) != null) {
                int i = Integer.parseInt(p);
                if (i > 0)
                    indexConfig.setProperty("mergeRate", String.valueOf(i));
            } else {
                indexConfig.setProperty("mergeRate", "8");
            }

            // maximum number of parts to merge
            if ((p = SearchConfig.getValue(SearchConfig.INDEX_MAXMERGEPARTS)) != null) {
                int i = Integer.parseInt(p);
                if (i > 0)
                    indexConfig.setProperty("maxMergeSize", String.valueOf(i));
            } else {
                indexConfig.setProperty("maxMergeSize", "40");
            }

            // Set up the field attributes from the schemas.
            // Initialize modules
            // XXX pass schema as arg? - in the context?
            load_schema_index_attrs("DOCUMENT", indexConfig);
            load_schema_index_attrs("CLASSIFICATION", indexConfig);

            // query threads
            if ((p = SearchConfig.getValue(SearchConfig.QUERY_THREADS)) != null) {
                int i = Integer.parseInt(p);
                if (i > 0)
                    queryThreads = i;
            }
            querySemaphore = new Semaphore(queryThreads);

            // open the index
            try {
                engine = new SearchEngine(indexConfig);
            } catch (SearchEngineException se) {
                // log - Error opening search engine
                SearchLogger.getLogger().log(Level.WARNING, "PSSH_CSPSB0025");
                throw new RDMException("IO Error opening search engine", se);
            }

            SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0026",
                    rootdir);

        } catch (Exception e) {
            if (!(e instanceof RDMException))
                e = new RDMException(e); // preserve the cause
            throw (RDMException) e;
        }
    }

    /**
     * Closes db and index extents
     * @param st
     * @throws RDMException
     */
    public void close(SToken st) throws RDMException {
        SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0027");

        try {
            indexBatch(st); // XXX synch?
        } catch (Exception e) {
            // log - Error indexing document batch
            SearchLogger.getLogger().log(Level.WARNING,
                    "PSSH_CSPSB0028", e);
        }

        try {
            engine.close();
        } catch (SearchEngineException se) {
            // log - Error closing search engine
            SearchLogger.getLogger().log(Level.WARNING, "PSSH_CSPSB0029");
        }
        SearchLogger.getLogger().log(Level.FINE, "PSSH_CSPSB0030");
    }

    /**
     * Optimizes the index.
     * @param st
     * @throws RDMException
     */
    public void optimize(SToken st) throws RDMException {
        try {
            engine.optimize();
        } catch (SearchEngineException se) {
            throw new RDMException("Error optimizing engine", se);
        }
    }

    public int purge(SToken st, RDMTransaction t) throws RDMException {
        engine.purge();
        return 0;
    }

    public void housekeep() throws RDMException {
        throw new RDMException("not implemented");
    }

    public void setIndexBatchSize(SToken st, int n) throws RDMException {
        maxBatchSize = n;
    }

    public void recover(SToken st, String dbhome, boolean fatal)
            throws RDMException {
        // XXX Nova has recover (index_dir, entry_type); - should be using it
        throw new RDMException("not implemented");
    }

}