/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
 *
 * Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved.
 *
 * The contents of this file are subject to the terms of either the GNU
 * General Public License Version 2 only ("GPL") or the Common
 * Development and Distribution License("CDDL") (collectively, the
 * "License"). You may not use this file except in compliance with the
 * License. You can obtain a copy of the License at
 * http://www.netbeans.org/cddl-gplv2.html
 * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
 * specific language governing permissions and limitations under the
 * License. When distributing the software, include this License Header
 * Notice in each file and include the License file at
 * nbbuild/licenses/CDDL-GPL-2-CP. Sun designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Sun in the GPL Version 2 section of the License file that
 * accompanied this code. If applicable, add the following below the
 * License Header, with the fields enclosed by brackets [] replaced by
 * your own identifying information:
 * "Portions Copyrighted [year] [name of copyright owner]"
 *
 * Contributor(s):
 *
 * The Original Software is NetBeans. The Initial Developer of the Original
 * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun
 * Microsystems, Inc. All Rights Reserved.
 *
 * If you wish your version of this file to be governed by only the CDDL
 * or only the GPL Version 2, indicate your decision by adding
 * "[Contributor] elects to include this software in this distribution
 * under the [CDDL or GPL Version 2] license." If you do not indicate a
 * single choice of license, a recipient has the option to distribute
 * your version of this file under either the CDDL, the GPL Version 2 or
 * to extend the choice of license to its licensees as provided above.
 * However, if you add GPL Version 2 code and therefore, elected the GPL
 * Version 2 license, then the option applies only if the new code is
 * made subject to such option by the copyright holder.
 */
package org.netbeans.modules.languages;

import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import javax.swing.text.AbstractDocument;
import javax.swing.text.Document;

import org.netbeans.api.languages.ASTEvaluator;
import org.netbeans.api.languages.ASTItem;
import org.netbeans.api.languages.ASTPath;
import org.netbeans.api.languages.ParserManager;
import org.netbeans.api.languages.ParserManagerListener;
import org.netbeans.api.languages.ASTToken;
import org.netbeans.api.languages.SyntaxContext;
import org.netbeans.api.languages.ParseException;
import org.netbeans.api.lexer.Token;
import org.netbeans.api.lexer.TokenHierarchy;
import org.netbeans.api.lexer.TokenHierarchyEvent;
import org.netbeans.api.lexer.TokenSequence;
import org.netbeans.api.languages.ASTNode;
import org.netbeans.api.languages.LanguageDefinitionNotFoundException;
import org.netbeans.api.languages.TokenInput;
import org.netbeans.api.lexer.TokenHierarchyListener;
import org.netbeans.modules.languages.lexer.SLanguageHierarchy;
import org.netbeans.modules.languages.lexer.SLexer;
import org.netbeans.modules.languages.parser.LLSyntaxAnalyser;
import org.netbeans.modules.languages.parser.SyntaxError;
import org.netbeans.modules.languages.parser.TokenInputUtils;
import org.netbeans.spi.lexer.MutableTextInput;
import org.openide.util.RequestProcessor;

/**
 *
 * @author Jan Jancura
 */
public class ParserManagerImpl extends ParserManager {

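    // Parsing state for one document: the current AST, parser state and syntax
    // errors, plus registered listeners and AST evaluators. cancel[0] is set to
    // true to abort a parsing or evaluation pass that is already running.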
    private Document document;
    private TokenHierarchy tokenHierarchy;
    private ASTNode ast = ASTNode.create(null, "Root", 0);
    private State state = State.NOT_PARSED;
    private List<SyntaxError> syntaxErrors = Collections.<SyntaxError>emptyList();
    private boolean[] cancel = new boolean[] {false};
    private Set<ParserManagerListener> listeners;
    private Map<String, Set<ASTEvaluator>> evaluatorsMap;
    private static RequestProcessor rp = new RequestProcessor("Parser");

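    /**
     * Creates a parser manager for the given document. If the document has no
     * token hierarchy yet (tests), one is created from the language registered
     * for the document's mime type. A token hierarchy listener is attached,
     * the first parse is scheduled when the mime type maps to a known
     * language, and the manager registers itself in the static managers map.
     */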
    public ParserManagerImpl(Document doc) {
        this.document = doc;
        tokenHierarchy = TokenHierarchy.get(doc);
        String mimeType = (String) doc.getProperty("mimeType");
        if (tokenHierarchy == null) {
            // for tests only....
            if (mimeType != null) {
                try {
                    Language language = LanguagesManager.getDefault().getLanguage(mimeType);
                    if (language.getParser() != null) {
                        doc.putProperty(
                            org.netbeans.api.lexer.Language.class,
                            new SLanguageHierarchy(language).language());
                        tokenHierarchy = TokenHierarchy.get(doc);
                    }
                } catch (LanguageDefinitionNotFoundException ex) {
                    // no language defined for this mime type - leave the hierarchy null
                }
            }
        }
        if (tokenHierarchy != null) {
            new DocListener(this, tokenHierarchy);
            if (mimeType != null && state == State.NOT_PARSED) {
                try {
                    LanguagesManager.getDefault().getLanguage(mimeType);
                    startParsing();
                } catch (LanguageDefinitionNotFoundException e) {
                    // not a supported language
                }
            }
        }

        managers.put(doc, new WeakReference<ParserManager>(this));
    }

    public static ParserManagerImpl getImpl(Document document) {
        return (ParserManagerImpl) get(document);
    }

    public State getState() {
        return state;
    }

    public List<SyntaxError> getSyntaxErrors() {
        return syntaxErrors;
    }

    public ASTNode getAST() {
        return ast;
    }

    public void addListener(ParserManagerListener l) {
        if (listeners == null)
            listeners = new HashSet<ParserManagerListener>();
        listeners.add(l);
    }

    public void removeListener(ParserManagerListener l) {
        if (listeners == null)
            return;
        listeners.remove(l);
    }

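    /**
     * Registers an evaluator. Evaluators are grouped by feature name and are
     * walked over the whole AST (see evaluate) after each finished parse.
     */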
    public void addASTEvaluator(ASTEvaluator e) {
        if (evaluatorsMap == null)
            evaluatorsMap = new HashMap<String, Set<ASTEvaluator>>();
        Set<ASTEvaluator> evaluatorsSet = evaluatorsMap.get(e.getFeatureName());
        if (evaluatorsSet == null) {
            evaluatorsSet = new HashSet<ASTEvaluator>();
            evaluatorsMap.put(e.getFeatureName(), evaluatorsSet);
        }
        evaluatorsSet.add(e);
    }

    public void removeASTEvaluator(ASTEvaluator e) {
        if (evaluatorsMap != null) {
            Set<ASTEvaluator> evaluatorsSet = evaluatorsMap.get(e.getFeatureName());
            if (evaluatorsSet != null)
                evaluatorsSet.remove(e);
        }
    }

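    /**
     * Fires the given state asynchronously: posts a task to the shared
     * "Parser" request processor that notifies the supplied listeners and
     * evaluators with the given AST root.
     */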
    public void fire(final State state,
                     final List<ParserManagerListener> listeners,
                     final Map<String, Set<ASTEvaluator>> evaluators,
                     final ASTNode root) {
        if (root == null)
            throw new NullPointerException();
        parsingTask = rp.post(new Runnable() {
            public void run() {
                cancel[0] = false;
                fire2(state, listeners, evaluators, root);
            }
        });
    }

    // private methods .........................................................

    private RequestProcessor.Task parsingTask;

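    /**
     * Switches to the PARSING state, cancels any parsing task already in
     * progress and schedules a new parse with a one second delay, so that
     * rapid successive changes are coalesced into a single reparse.
     */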
    private synchronized void startParsing() {
        setChange(State.PARSING, ast);
        cancel[0] = true;
        if (parsingTask != null) {
            parsingTask.cancel();
        }
        parsingTask = rp.post(new Runnable() {
            public void run() {
                cancel[0] = false;
                parse();
            }
        }, 1000);
    }

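    /**
     * Stores the new state and AST and synchronously notifies a snapshot of
     * the current listeners and evaluators. Does nothing if the state has not
     * changed.
     */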
    private void setChange(State state, ASTNode root) {
        if (state == this.state)
            return;
        this.state = state;
        this.ast = root;
        List<ParserManagerListener> listeners = this.listeners == null ? null
            : new ArrayList<ParserManagerListener>(this.listeners);
        Map<String, Set<ASTEvaluator>> evaluatorsMap = this.evaluatorsMap == null ? null
            : new HashMap<String, Set<ASTEvaluator>>(this.evaluatorsMap);
        fire2(state, listeners, evaluatorsMap, root);
    }

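    /**
     * Notifies evaluators (beforeEvaluation, a full AST walk, afterEvaluation)
     * and then the listeners. Returns early whenever cancel[0] is set, and
     * does nothing while the state is still PARSING.
     */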
    private void fire2(State state,
                       List<ParserManagerListener> listeners,
                       Map<String, Set<ASTEvaluator>> evaluators, ASTNode root) {

        if (state == State.PARSING)
            return;
        if (evaluators != null) {
            if (!evaluators.isEmpty()) {
                Iterator<Set<ASTEvaluator>> it = evaluators.values().iterator();
                while (it.hasNext()) {
                    Iterator<ASTEvaluator> it2 = it.next().iterator();
                    while (it2.hasNext()) {
                        ASTEvaluator e = it2.next();
                        e.beforeEvaluation(state, root);
                        if (cancel[0])
                            return;
                    }
                }
                //times = new HashMap<Object,Long> ();
                evaluate(state, root, new ArrayList<ASTItem>(), evaluators //, times
                );
                //iit = times.keySet ().iterator ();while (iit.hasNext()) {Object object = iit.next();System.out.println("  Evaluator " + object + " : " + times.get (object));}
                if (cancel[0])
                    return;
                it = evaluators.values().iterator();
                while (it.hasNext()) {
                    Iterator<ASTEvaluator> it2 = it.next().iterator();
                    while (it2.hasNext()) {
                        ASTEvaluator e = it2.next();
                        e.afterEvaluation(state, root);
                        if (cancel[0])
                            return;
                    }
                }
            }
        }

        if (listeners != null) {
            Iterator<ParserManagerListener> it = listeners.iterator();
            while (it.hasNext()) {
                ParserManagerListener l = it.next();
                //long start = System.currentTimeMillis ();
                l.parsed(state, ast);
                //Long t = times.get (l);if (t == null) t = new Long (0);times.put (l, t.longValue () + System.currentTimeMillis () - start);
                if (cancel[0])
                    return;
            }
        }
        //Iterator iit = times.keySet ().iterator ();while (iit.hasNext()) {Object object = iit.next();System.out.println("  Listener " + object + " : " + times.get (object));}
    }

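    /**
     * Depth-first walk over the AST: for every item, the feature list of its
     * language gets a chance to run the registered evaluators on the current
     * path. Aborts as soon as cancel[0] is set.
     */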
    private void evaluate(State state, ASTItem item,
                          List<ASTItem> path,
                          Map<String, Set<ASTEvaluator>> evaluatorsMap2 //, Map<Object,Long> times
    ) {
        path.add(item);
        Language language = (Language) item.getLanguage();
        if (language != null)
            language.getFeatureList().evaluate(state, path, evaluatorsMap2 //, times
            );
        Iterator<ASTItem> it2 = item.getChildren().iterator();
        while (it2.hasNext()) {
            if (cancel[0])
                return;
            evaluate(state, it2.next(), path, evaluatorsMap2 //, times
            );
        }
        path.remove(path.size() - 1);
    }

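    /**
     * Runs one parsing pass: lexes the document into a TokenInput, feeds it to
     * the language's LL syntax analyser (collecting syntax errors), lets an
     * optional "AST" feature post-process the tree and finally publishes the
     * result via setChange(State.OK, ast).
     */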
    private void parse() {
        setChange(State.PARSING, ast);
        String mimeType = (String) document.getProperty("mimeType");
        Language language = getLanguage(mimeType);
        LLSyntaxAnalyser analyser = language.getAnalyser();
        //long start = System.currentTimeMillis ();
        TokenInput input = createTokenInput();
        if (cancel[0])
            return;
        //System.out.println ("lex " + (System.currentTimeMillis () - start));start = System.currentTimeMillis ();
        List<SyntaxError> newSyntaxErrors = new ArrayList<SyntaxError>();
        try {
            ast = analyser.read(input, true, newSyntaxErrors, cancel);
            //System.out.println ("syntax " + (System.currentTimeMillis () - start));
            syntaxErrors = newSyntaxErrors;
        } catch (ParseException ex) {
            // should not happen - read() is called with skipErrors == true
            Utils.notify(ex);
            ast = ASTNode.create(language, "Root", 0);
            setChange(State.OK, ast);
            //System.out.println ("fire " + (System.currentTimeMillis () - start));
            return;
        }
        if (cancel[0])
            return;
        //long start = System.currentTimeMillis ();
        try {
            Feature astProperties = language.getFeatureList().getFeature("AST");
            if (astProperties != null) {
                ASTNode processedAst = (ASTNode) astProperties.getValue(
                    "process", SyntaxContext.create(document, ASTPath.create(ast)));
                if (processedAst != null) {
                    ast = processedAst;
                }
            }
        } catch (Exception ex) {
            Utils.notify(ex);
            ast = ASTNode.create(language, "Root", 0);
        }
        //start = System.currentTimeMillis () - start;if (start > 100) System.out.println ("postprocess " + start);
        if (ast == null) {
            Utils.notify(new NullPointerException());
            ast = ASTNode.create(language, "Root", 0);
        }
        //start = System.currentTimeMillis ();
        setChange(State.OK, ast);
        //System.out.println ("fire " + (System.currentTimeMillis () - start));
    }

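    /**
     * Builds a TokenInput from the document's top level token sequence inside
     * document.render(), so the text cannot change while the tokens are
     * copied. Returns an empty input when there is no token hierarchy or token
     * sequence, and null when the read was cancelled.
     */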
    private TokenInput createTokenInput() {
        final TokenInput[] ret = new TokenInput[1];
        document.render(new Runnable() {
            public void run() {
                if (tokenHierarchy == null) {
                    ret[0] = TokenInputUtils.create(Collections.<ASTToken>emptyList());
                    return;
                }
                TokenSequence ts = tokenHierarchy.tokenSequence();
                if (ts == null) {
                    ret[0] = TokenInputUtils.create(Collections.<ASTToken>emptyList());
                    return;
                }
                List<ASTToken> tokens = getTokens(ts);
                if (cancel[0]) {
                    // Leave null in ret[0]
                    return;
                }
                ret[0] = TokenInputUtils.create(tokens);
            }
        });
        return ret[0];
    }

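    /**
     * Converts a lexer TokenSequence into a list of ASTTokens, recursing into
     * embedded sequences. Tokens marked by SLexer as injected code or as
     * continuous-token parts are merged into a neighbouring ASTToken's
     * children. Returns null if the sequence is null or the work is cancelled.
     */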
    private List<ASTToken> getTokens(TokenSequence ts) {
        if (ts == null)
            return null;
        Language language = null;
        try {
            language = LanguagesManager.getDefault().getLanguage(ts.language().mimeType());
        } catch (LanguageDefinitionNotFoundException ex) {
            // no language definition - ASTTokens are created without one
        }
        List<ASTToken> tokens = new ArrayList<ASTToken>();
        if (!ts.moveNext())
            return tokens;
        Token t = ts.token();
        int type = t.id().ordinal();
        int offset = ts.offset();
        String ttype = (String) t.getProperty("type");
        List<ASTToken> firstInjection = null;
        if (ttype == SLexer.INJECTED_CODE) {
            // first token can be injected
            TokenSequence ts2 = ts.embedded();
            firstInjection = getTokens(ts2);
            if (!ts.moveNext()) {
                tokens.add(ASTToken.create(language, 0, "", offset, 0, firstInjection));
                return tokens;
            }
            t = ts.token();
            type = t.id().ordinal();
            offset = ts.offset();
            ttype = (String) t.getProperty("type");
        }
        for (;;) {
            if (cancel[0])
                return null;
            if (ttype == null) {
                List<ASTToken> children = getTokens(ts.embedded());
                if (firstInjection != null) {
                    if (children != null)
                        children.addAll(firstInjection);
                    else
                        children = firstInjection;
                    firstInjection = null;
                }
                tokens.add(ASTToken.create(language, type, t.text().toString(),
                    offset, t.length(), children));
            } else if (ttype == SLexer.CONTINUOUS_TOKEN_START) {
                StringBuilder sb = new StringBuilder(t.text());
                List<ASTToken> children = new ArrayList<ASTToken>();
                TokenSequence ts2 = ts.embedded();
                while (ts.moveNext()) {
                    if (cancel[0])
                        return null;
                    t = ts.token();
                    ttype = (String) t.getProperty("type");
                    if (ttype == null) {
                        ts.movePrevious();
                        break;
                    }
                    if (ttype == SLexer.INJECTED_CODE) {
                        ts2 = ts.embedded();
                        if (ts2 != null) {
                            List<ASTToken> tokens2 = getTokens(ts2);
                            if (cancel[0])
                                return null;
                            children.addAll(tokens2);
                        }
                        continue;
                    }
                    if (ttype == SLexer.CONTINUOUS_TOKEN_START) {
                        ts.movePrevious();
                        break;
                    }
                    if (ttype != SLexer.CONTINUOUS_TOKEN)
                        throw new IllegalArgumentException();
                    if (type != t.id().ordinal())
                        throw new IllegalArgumentException();
                    sb.append(t.text());
                }
                int no = ts.offset() + ts.token().length();
                tokens.add(ASTToken.create(language, type, sb.toString(),
                    offset, no - offset, children));
            } else
                throw new IllegalArgumentException();
            if (!ts.moveNext())
                return tokens;
            t = ts.token();
            type = t.id().ordinal();
            offset = ts.offset();
            ttype = (String) t.getProperty("type");
        }
    }

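    /**
     * Returns the registered Language for the mime type, or a newly created
     * Language for the normalized mime type when no definition is registered.
     */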
    private Language getLanguage(String mimeType) {
        try {
            return LanguagesManager.getDefault().getLanguage(mimeType);
        } catch (LanguageDefinitionNotFoundException ex) {
            return Language.create(LanguagesManager.normalizeMimeType(mimeType));
        }
    }

    private static Map<Document, WeakReference<ParserManager>> managers =
        new WeakHashMap<Document, WeakReference<ParserManager>>();

    // HACK
    static void refreshHack() {
        Iterator<Document> it = managers.keySet().iterator();
        while (it.hasNext()) {
            AbstractDocument document = (AbstractDocument) it.next();
            document.readLock();
            try {
                MutableTextInput mti = (MutableTextInput) document.getProperty(MutableTextInput.class);
                mti.tokenHierarchyControl().rebuild();
            } finally {
                document.readUnlock();
            }
            //            final StyledDocument document = (StyledDocument) it.next ();
            //            NbDocument.runAtomic (document, new Runnable () {
            //                public void run() {
            //                    MutableTextInput mti = (MutableTextInput) document.getProperty (MutableTextInput.class);
            //                    mti.tokenHierarchyControl ().rebuild ();
            //                }
            //            });
        }
    }

    // innerclasses ............................................................

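    /**
     * Listens on the token hierarchy and restarts parsing after every change.
     * Holds the ParserManagerImpl only weakly, so the listener does not keep
     * the manager (and its document) alive.
     */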
    private static class DocListener implements TokenHierarchyListener {

        private WeakReference<ParserManagerImpl> pmwr;

        DocListener(ParserManagerImpl pm, TokenHierarchy hierarchy) {
            pmwr = new WeakReference<ParserManagerImpl>(pm);
            hierarchy.addTokenHierarchyListener(this);
        }

        private ParserManagerImpl getPM() {
            return pmwr.get();
        }

        public void tokenHierarchyChanged(TokenHierarchyEvent evt) {
            ParserManagerImpl pm = getPM();
            if (pm == null)
                return;
            pm.startParsing();
        }
    }
}
|