// $ANTLR : "preproc.g" -> "Preprocessor.java"$

package persistence.antlr.preprocessor;

import persistence.antlr.TokenBuffer;
import persistence.antlr.TokenStreamException;
import persistence.antlr.TokenStreamIOException;
import persistence.antlr.ANTLRException;
import persistence.antlr.LLkParser;
import persistence.antlr.Token;
import persistence.antlr.TokenStream;
import persistence.antlr.RecognitionException;
import persistence.antlr.NoViableAltException;
import persistence.antlr.MismatchedTokenException;
import persistence.antlr.SemanticException;
import persistence.antlr.ParserSharedInputState;
import persistence.antlr.collections.impl.BitSet;

import persistence.antlr.collections.impl.IndexedVector;
import java.util.Hashtable;
import persistence.antlr.preprocessor.Grammar;

public class Preprocessor extends persistence.antlr.LLkParser implements
        PreprocessorTokenTypes {

    // This chunk of error reporting code provided by Brian Smith

    private persistence.antlr.Tool antlrTool;

    /** In order to make it so existing subclasses don't break, we won't require
     *  that the persistence.antlr.Tool instance be passed as a constructor argument.
     *  Instead, the persistence.antlr.Tool instance should register itself via
     *  {@link #setTool(persistence.antlr.Tool)}.
     *  @throws IllegalStateException if a tool has already been registered
     *  @since 2.7.2
     */
    public void setTool(persistence.antlr.Tool tool) {
        if (antlrTool == null) {
            antlrTool = tool;
        } else {
            throw new IllegalStateException(
                    "persistence.antlr.Tool already registered");
        }
    }

    /** @since 2.7.2 */
    protected persistence.antlr.Tool getTool() {
        return antlrTool;
    }
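
    // A minimal usage sketch (hypothetical caller code, not emitted by ANTLR):
    // the Tool instance is expected to register itself once, before parsing
    // begins; a second registration attempt throws IllegalStateException.
    //
    //     Preprocessor pp = new Preprocessor(lexer);
    //     pp.setTool(tool);
    //     pp.grammarFile(hierarchy, grammarFileName);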

    /** Delegates the error message to the tool if any was registered via
     *  {@link #setTool(persistence.antlr.Tool)}.
     *  @since 2.7.2
     */
    public void reportError(String s) {
        if (getTool() != null) {
            getTool().error(s, getFilename(), -1, -1);
        } else {
            super.reportError(s);
        }
    }

    /** Delegates the error message to the tool if any was registered via
     *  {@link #setTool(persistence.antlr.Tool)}.
     *  @since 2.7.2
     */
    public void reportError(RecognitionException e) {
        if (getTool() != null) {
            getTool().error(e.getErrorMessage(), e.getFilename(),
                    e.getLine(), e.getColumn());
        } else {
            super.reportError(e);
        }
    }

    /** Delegates the warning message to the tool if any was registered via
     *  {@link #setTool(persistence.antlr.Tool)}.
     *  @since 2.7.2
     */
    public void reportWarning(String s) {
        if (getTool() != null) {
            getTool().warning(s, getFilename(), -1, -1);
        } else {
            super.reportWarning(s);
        }
    }

    protected Preprocessor(TokenBuffer tokenBuf, int k) {
        super(tokenBuf, k);
        tokenNames = _tokenNames;
    }

    public Preprocessor(TokenBuffer tokenBuf) {
        this(tokenBuf, 1);
    }

    protected Preprocessor(TokenStream lexer, int k) {
        super(lexer, k);
        tokenNames = _tokenNames;
    }

    public Preprocessor(TokenStream lexer) {
        this(lexer, 1);
    }

    public Preprocessor(ParserSharedInputState state) {
        super(state, 1);
        tokenNames = _tokenNames;
    }

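    /** Parses a complete grammar file: zero or more header actions, an
     *  optional file-level options block, and any number of grammar class
     *  definitions, each of which is registered with the supplied Hierarchy. */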
    public final void grammarFile(Hierarchy hier, String file)
            throws RecognitionException, TokenStreamException {

        Token hdr = null;

        Grammar gr;
        IndexedVector opt = null;

        try { // for error handling
            {
                _loop265: do {
                    if ((LA(1) == HEADER_ACTION)) {
                        hdr = LT(1);
                        match(HEADER_ACTION);
                        hier.getFile(file).addHeaderAction(hdr.getText());
                    } else {
                        break _loop265;
                    }

                } while (true);
            }
            {
                switch (LA(1)) {
                case OPTIONS_START: {
                    opt = optionSpec(null);
                    break;
                }
                case EOF:
                case ACTION:
                case LITERAL_class: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                _loop268: do {
                    if ((LA(1) == ACTION || LA(1) == LITERAL_class)) {
                        gr = class_def(file, hier);

                        if (gr != null && opt != null) {
                            hier.getFile(file).setOptions(opt);
                        }
                        if (gr != null) {
                            gr.setFileName(file);
                            hier.addGrammar(gr);
                        }

                    } else {
                        break _loop268;
                    }

                } while (true);
            }
            match(Token.EOF_TYPE);
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_0);
        }
    }

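    /** Parses an options block into an IndexedVector of Option entries.
     *  When a Grammar is supplied, importVocab and exportVocab settings are
     *  additionally recorded directly on it. */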
    public final IndexedVector optionSpec(Grammar gr)
            throws RecognitionException, TokenStreamException {
        IndexedVector options;

        Token op = null;
        Token rhs = null;

        options = new IndexedVector();

        try { // for error handling
            match(OPTIONS_START);
            {
                _loop280: do {
                    if ((LA(1) == ID)) {
                        op = LT(1);
                        match(ID);
                        rhs = LT(1);
                        match(ASSIGN_RHS);

                        Option newOp = new Option(op.getText(), rhs.getText(), gr);
                        options.appendElement(newOp.getName(), newOp);
                        if (gr != null
                                && op.getText().equals("importVocab")) {
                            gr.specifiedVocabulary = true;
                            gr.importVocab = rhs.getText();
                        } else if (gr != null
                                && op.getText().equals("exportVocab")) {
                            // don't want ';' included in outputVocab.
                            // This is heinously inconsistent! Ugh.
                            gr.exportVocab = rhs.getText().substring(0,
                                    rhs.getText().length() - 1);
                            gr.exportVocab = gr.exportVocab.trim();
                        }

                    } else {
                        break _loop280;
                    }

                } while (true);
            }
            match(RCURLY);
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_1);
        }
        return options;
    }

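    /** Parses a single grammar class definition ("class X extends Y ... ;"
     *  plus its options, tokens section, member action, and rules) and returns
     *  the resulting Grammar, or null if the definition could not be built
     *  (for example, a redefinition of an existing grammar). */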
    public final Grammar class_def(String file, Hierarchy hier)
            throws RecognitionException, TokenStreamException {
        Grammar gr;

        Token preamble = null;
        Token sub = null;
        Token sup = null;
        Token tk = null;
        Token memberA = null;

        gr = null;
        IndexedVector rules = new IndexedVector(100);
        IndexedVector classOptions = null;
        String sc = null;

        try { // for error handling
            {
                switch (LA(1)) {
                case ACTION: {
                    preamble = LT(1);
                    match(ACTION);
                    break;
                }
                case LITERAL_class: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            match(LITERAL_class);
            sub = LT(1);
            match(ID);
            match(LITERAL_extends);
            sup = LT(1);
            match(ID);
            {
                switch (LA(1)) {
                case SUBRULE_BLOCK: {
                    sc = superClass();
                    break;
                }
                case SEMI: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            match(SEMI);

            gr = (Grammar) hier.getGrammar(sub.getText());
            if (gr != null) {
                // antlr.Tool.toolError("redefinition of grammar "+gr.getName()+" ignored");
                gr = null;
                throw new SemanticException("redefinition of grammar "
                        + sub.getText(), file, sub.getLine(), sub.getColumn());
            } else {
                gr = new Grammar(hier.getTool(), sub.getText(), sup.getText(), rules);
                gr.superClass = sc;
                if (preamble != null) {
                    gr.setPreambleAction(preamble.getText());
                }
            }

            {
                switch (LA(1)) {
                case OPTIONS_START: {
                    classOptions = optionSpec(gr);
                    break;
                }
                case ACTION:
                case ID:
                case TOKENS_SPEC:
                case LITERAL_protected:
                case LITERAL_private:
                case LITERAL_public: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }

            if (gr != null) {
                gr.setOptions(classOptions);
            }

            {
                switch (LA(1)) {
                case TOKENS_SPEC: {
                    tk = LT(1);
                    match(TOKENS_SPEC);
                    gr.setTokenSection(tk.getText());
                    break;
                }
                case ACTION:
                case ID:
                case LITERAL_protected:
                case LITERAL_private:
                case LITERAL_public: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                switch (LA(1)) {
                case ACTION: {
                    memberA = LT(1);
                    match(ACTION);
                    gr.setMemberAction(memberA.getText());
                    break;
                }
                case ID:
                case LITERAL_protected:
                case LITERAL_private:
                case LITERAL_public: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                int _cnt277 = 0;
                _loop277: do {
                    if ((_tokenSet_2.member(LA(1)))) {
                        rule(gr);
                    } else {
                        if (_cnt277 >= 1) {
                            break _loop277;
                        } else {
                            throw new NoViableAltException(LT(1), getFilename());
                        }
                    }

                    _cnt277++;
                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_3);
        }
        return gr;
    }

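    /** Parses the optional superclass specification (a SUBRULE_BLOCK following
     *  "extends ID") and returns its text. */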
    public final String superClass() throws RecognitionException,
            TokenStreamException {
        String sup;

        sup = LT(1).getText();

        try { // for error handling
            match(SUBRULE_BLOCK);
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_4);
        }
        return sup;
    }

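    /** Parses one rule definition (visibility, name, optional '!', argument
     *  action, return spec, throws clause, options, init action, rule block,
     *  and exception group) and adds the resulting Rule to the given Grammar. */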
    public final void rule(Grammar gr) throws RecognitionException,
            TokenStreamException {

        Token r = null;
        Token arg = null;
        Token ret = null;
        Token init = null;
        Token blk = null;

        IndexedVector o = null; // options for rule
        String vis = null;
        boolean bang = false;
        String eg = null, thr = "";

        try { // for error handling
            {
                switch (LA(1)) {
                case LITERAL_protected: {
                    match(LITERAL_protected);
                    vis = "protected";
                    break;
                }
                case LITERAL_private: {
                    match(LITERAL_private);
                    vis = "private";
                    break;
                }
                case LITERAL_public: {
                    match(LITERAL_public);
                    vis = "public";
                    break;
                }
                case ID: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            r = LT(1);
            match(ID);
            {
                switch (LA(1)) {
                case BANG: {
                    match(BANG);
                    bang = true;
                    break;
                }
                case ACTION:
                case OPTIONS_START:
                case ARG_ACTION:
                case LITERAL_returns:
                case RULE_BLOCK:
                case LITERAL_throws: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                switch (LA(1)) {
                case ARG_ACTION: {
                    arg = LT(1);
                    match(ARG_ACTION);
                    break;
                }
                case ACTION:
                case OPTIONS_START:
                case LITERAL_returns:
                case RULE_BLOCK:
                case LITERAL_throws: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                switch (LA(1)) {
                case LITERAL_returns: {
                    match(LITERAL_returns);
                    ret = LT(1);
                    match(ARG_ACTION);
                    break;
                }
                case ACTION:
                case OPTIONS_START:
                case RULE_BLOCK:
                case LITERAL_throws: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                switch (LA(1)) {
                case LITERAL_throws: {
                    thr = throwsSpec();
                    break;
                }
                case ACTION:
                case OPTIONS_START:
                case RULE_BLOCK: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                switch (LA(1)) {
                case OPTIONS_START: {
                    o = optionSpec(null);
                    break;
                }
                case ACTION:
                case RULE_BLOCK: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                switch (LA(1)) {
                case ACTION: {
                    init = LT(1);
                    match(ACTION);
                    break;
                }
                case RULE_BLOCK: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            blk = LT(1);
            match(RULE_BLOCK);
            eg = exceptionGroup();

            String rtext = blk.getText() + eg;
            Rule ppr = new Rule(r.getText(), rtext, o, gr);
            ppr.setThrowsSpec(thr);
            if (arg != null) {
                ppr.setArgs(arg.getText());
            }
            if (ret != null) {
                ppr.setReturnValue(ret.getText());
            }
            if (init != null) {
                ppr.setInitAction(init.getText());
            }
            if (bang) {
                ppr.setBang();
            }
            ppr.setVisibility(vis);
            if (gr != null) {
                gr.addRule(ppr);
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_5);
        }
    }

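    /** Parses a "throws A, B, ..." clause and returns it as a string. */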
    public final String throwsSpec() throws RecognitionException,
            TokenStreamException {
        String t;

        Token a = null;
        Token b = null;
        t = "throws ";

        try { // for error handling
            match(LITERAL_throws);
            a = LT(1);
            match(ID);
            t += a.getText();
            {
                _loop291: do {
                    if ((LA(1) == COMMA)) {
                        match(COMMA);
                        b = LT(1);
                        match(ID);
                        t += "," + b.getText();
                    } else {
                        break _loop291;
                    }

                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_6);
        }
        return t;
    }

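    /** Parses zero or more exception specifications following a rule block and
     *  returns their concatenated text. */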
    public final String exceptionGroup() throws RecognitionException,
            TokenStreamException {
        String g;

        String e = null;
        g = "";

        try { // for error handling
            {
                _loop294: do {
                    if ((LA(1) == LITERAL_exception)) {
                        e = exceptionSpec();
                        g += e;
                    } else {
                        break _loop294;
                    }

                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_5);
        }
        return g;
    }

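    /** Parses one "exception" specification, including any attached catch
     *  handlers, and returns its text. */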
    public final String exceptionSpec() throws RecognitionException,
            TokenStreamException {
        String es;

        Token aa = null;
        String h = null;
        es = System.getProperty("line.separator") + "exception ";

        try { // for error handling
            match(LITERAL_exception);
            {
                switch (LA(1)) {
                case ARG_ACTION: {
                    aa = LT(1);
                    match(ARG_ACTION);
                    es += aa.getText();
                    break;
                }
                case EOF:
                case ACTION:
                case LITERAL_class:
                case ID:
                case LITERAL_protected:
                case LITERAL_private:
                case LITERAL_public:
                case LITERAL_exception:
                case LITERAL_catch: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
                }
            }
            {
                _loop298: do {
                    if ((LA(1) == LITERAL_catch)) {
                        h = exceptionHandler();
                        es += h;
                    } else {
                        break _loop298;
                    }

                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_7);
        }
        return es;
    }

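    /** Parses a single "catch" handler and returns its text. */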
    public final String exceptionHandler() throws RecognitionException,
            TokenStreamException {
        String h;

        Token a1 = null;
        Token a2 = null;
        h = null;

        try { // for error handling
            match(LITERAL_catch);
            a1 = LT(1);
            match(ARG_ACTION);
            a2 = LT(1);
            match(ACTION);
            h = System.getProperty("line.separator") + "catch "
                    + a1.getText() + " " + a2.getText();
        } catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_8);
        }
        return h;
    }

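    // Token type names, indexed by token type; assigned to tokenNames in the
    // constructors above.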
    public static final String[] _tokenNames = { "<0>", "EOF", "<2>",
            "NULL_TREE_LOOKAHEAD", "\"tokens\"", "HEADER_ACTION",
            "SUBRULE_BLOCK", "ACTION", "\"class\"", "ID",
            "\"extends\"", "SEMI", "TOKENS_SPEC", "OPTIONS_START",
            "ASSIGN_RHS", "RCURLY", "\"protected\"", "\"private\"",
            "\"public\"", "BANG", "ARG_ACTION", "\"returns\"",
            "RULE_BLOCK", "\"throws\"", "COMMA", "\"exception\"",
            "\"catch\"", "ALT", "ELEMENT", "LPAREN", "RPAREN",
            "ID_OR_KEYWORD", "CURLY_BLOCK_SCARF", "WS", "NEWLINE",
            "COMMENT", "SL_COMMENT", "ML_COMMENT", "CHAR_LITERAL",
            "STRING_LITERAL", "ESC", "DIGIT", "XDIGIT" };

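    // Bit sets describing the tokens to resynchronize on after a parse error;
    // each is passed to consumeUntil(...) in the corresponding catch block.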
    private static final long[] mk_tokenSet_0() {
        long[] data = { 2L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());

    private static final long[] mk_tokenSet_1() {
        long[] data = { 4658050L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());

    private static final long[] mk_tokenSet_2() {
        long[] data = { 459264L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());

    private static final long[] mk_tokenSet_3() {
        long[] data = { 386L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());

    private static final long[] mk_tokenSet_4() {
        long[] data = { 2048L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());

    private static final long[] mk_tokenSet_5() {
        long[] data = { 459650L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_5 = new BitSet(mk_tokenSet_5());

    private static final long[] mk_tokenSet_6() {
        long[] data = { 4202624L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_6 = new BitSet(mk_tokenSet_6());

    private static final long[] mk_tokenSet_7() {
        long[] data = { 34014082L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_7 = new BitSet(mk_tokenSet_7());

    private static final long[] mk_tokenSet_8() {
        long[] data = { 101122946L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_8 = new BitSet(mk_tokenSet_8());

}
|