0001: // $ANTLR 2.7.7 (2006-01-29): "antlr.g" -> "ANTLRParser.java"$
0002:
0003: /*
0004: [The "BSD licence"]
0005: Copyright (c) 2005-2006 Terence Parr
0006: All rights reserved.
0007:
0008: Redistribution and use in source and binary forms, with or without
0009: modification, are permitted provided that the following conditions
0010: are met:
0011: 1. Redistributions of source code must retain the above copyright
0012: notice, this list of conditions and the following disclaimer.
0013: 2. Redistributions in binary form must reproduce the above copyright
0014: notice, this list of conditions and the following disclaimer in the
0015: documentation and/or other materials provided with the distribution.
0016: 3. The name of the author may not be used to endorse or promote products
0017: derived from this software without specific prior written permission.
0018:
0019: THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
0020: IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
0021: OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
0022: IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
0023: INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
0024: NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
0025: DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
0026: THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
0027: (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
0028: THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
0029: */
0030: package org.antlr.tool;
0031:
0032: import java.util.*;
0033: import java.io.*;
0034: import org.antlr.analysis.*;
0035: import org.antlr.misc.*;
0036: import antlr.*;
0037:
0038: import antlr.TokenBuffer;
0039: import antlr.TokenStreamException;
0040: import antlr.TokenStreamIOException;
0041: import antlr.ANTLRException;
0042: import antlr.LLkParser;
0043: import antlr.Token;
0044: import antlr.TokenStream;
0045: import antlr.RecognitionException;
0046: import antlr.NoViableAltException;
0047: import antlr.MismatchedTokenException;
0048: import antlr.SemanticException;
0049: import antlr.ParserSharedInputState;
0050: import antlr.collections.impl.BitSet;
0051: import antlr.collections.AST;
0052: import java.util.Hashtable;
0053: import antlr.ASTFactory;
0054: import antlr.ASTPair;
0055: import antlr.collections.impl.ASTArray;
0056:
0057: /** Read in an ANTLR grammar and build an AST. Try not to do
0058: * any actions, just build the tree.
0059: *
0060: * The phases are:
0061: *
0062: * antlr.g (this file)
0063: * assign.types.g
0064: * define.g
0065: * buildnfa.g
0066: * antlr.print.g (optional)
0067: * codegen.g
0068: *
0069: * Terence Parr
0070: * University of San Francisco
0071: * 2005
0072: */
0073: public class ANTLRParser extends antlr.LLkParser implements
0074: ANTLRTokenTypes {
0075:
0076: Grammar grammar = null;
0077: protected int gtype = 0;
0078: protected String currentRuleName = null;
0079: protected GrammarAST currentBlockAST = null;
0080:
0081: /* This next stuff supports construction of the Tokens artificial rule.
0082: I hate having some partial functionality here; I'd rather do everything
0083: in future tree passes, but the Tokens rule is sensitive to filter mode.
0084: And if it adds syn preds, future tree passes will need to process the
0085: fragments defined in Tokens; a cyclic dependency.
0086: As of 1-17-06, then, Tokens is created for lexer grammars in the
0087: antlr grammar parser itself.
0088:
0089: This grammar is also sensitive to the backtrack grammar option, which
0090: tells ANTLR to automatically backtrack when it can't compute a DFA.
0091:
0092: 7-2-06: I moved all option processing from define.g to antlr.g as I
0093: need the backtrack option etc. for blocks. Got messy.
0094: */
0095: protected List lexerRuleNames = new ArrayList();
0096:
0097: public List getLexerRuleNames() {
0098: return lexerRuleNames;
0099: }
0100:
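/** Wrap a set b in a one-alternative block, #(BLOCK #(ALT b EOA) EOB),
 * prefixing the alt with a synpred when auto-backtracking requires it.
 */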
0101: protected GrammarAST setToBlockWithSet(GrammarAST b) {
0102: GrammarAST alt = (GrammarAST) astFactory.make((new ASTArray(3))
0103: .add((GrammarAST) astFactory.create(ALT, "ALT")).add(b)
0104: .add(
0105: (GrammarAST) astFactory.create(EOA,
0106: "<end-of-alt>")));
0107: prefixWithSynPred(alt);
0108: return (GrammarAST) astFactory.make((new ASTArray(3)).add(
0109: (GrammarAST) astFactory.create(BLOCK, "BLOCK"))
0110: .add(alt).add(
0111: (GrammarAST) astFactory.create(EOB,
0112: "<end-of-block>")));
0113: }
0114:
0115: /** Create a copy of the alt and make it into a BLOCK; all actions,
0116: * labels, tree operators, rewrites are removed.
0117: */
0118: protected GrammarAST createBlockFromDupAlt(GrammarAST alt) {
0119: //GrammarAST nalt = (GrammarAST)astFactory.dupTree(alt);
0120: GrammarAST nalt = GrammarAST.dupTreeNoActions(alt, null);
0121: GrammarAST blk = (GrammarAST) astFactory.make((new ASTArray(3))
0122: .add((GrammarAST) astFactory.create(BLOCK, "BLOCK"))
0123: .add(nalt).add(
0124: (GrammarAST) astFactory.create(EOB,
0125: "<end-of-block>")));
0126: return blk;
0127: }
0128:
0129: /** Rewrite alt to have a synpred as its first element:
0130: * (xxx)=>xxx
0131: * but only if the user didn't specify one manually.
0132: */
0133: protected void prefixWithSynPred(GrammarAST alt) {
0134: // if they want backtracking and it's not a lexer rule in a combined grammar
0135: String autoBacktrack = (String) currentBlockAST
0136: .getOption("backtrack");
0137: if (autoBacktrack == null) {
0138: autoBacktrack = (String) grammar.getOption("backtrack");
0139: }
0140: if (autoBacktrack != null
0141: && autoBacktrack.equals("true")
0142: && !(gtype == COMBINED_GRAMMAR && Character
0143: .isUpperCase(currentRuleName.charAt(0)))
0144: && alt.getFirstChild().getType() != SYN_SEMPRED) {
0145: // duplicate alt and make a synpred block around that dup'd alt
0146: GrammarAST synpredBlockAST = createBlockFromDupAlt(alt);
0147:
0148: // Create a BACKTRACK_SEMPRED node as if user had typed this in
0149: // Effectively we replace (xxx)=>xxx with {synpredxxx}? xxx
0150: GrammarAST synpredAST = createSynSemPredFromBlock(
0151: synpredBlockAST, BACKTRACK_SEMPRED);
0152:
0153: // insert BACKTRACK_SEMPRED as first element of alt
0154: synpredAST.getLastSibling().setNextSibling(
0155: alt.getFirstChild());
0156: alt.setFirstChild(synpredAST);
0157: }
0158: }
0159:
0160: protected GrammarAST createSynSemPredFromBlock(
0161: GrammarAST synpredBlockAST, int synpredTokenType) {
0162: // add grammar fragment to a list so we can make fake rules for them
0163: // later.
0164: String predName = grammar.defineSyntacticPredicate(
0165: synpredBlockAST, currentRuleName);
0166: // convert (alpha)=> into {synpredN}? where N is some pred count
0167: // during code gen we convert to function call with templates
0168: String synpredinvoke = predName;
0169: GrammarAST p = (GrammarAST) astFactory.create(synpredTokenType,
0170: synpredinvoke);
0171: p.setEnclosingRule(currentRuleName);
0172: // track how many decisions have synpreds
0173: grammar.blocksWithSynPreds.add(currentBlockAST);
0174: return p;
0175: }
0176:
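/** Build a bare #(RULE ID modifier ARG RET scope block EOR) tree around the
 * given block, optionally marking it as a fragment; ARG, RET, and scope are
 * empty placeholders.
 */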
0177: public GrammarAST createSimpleRuleAST(String name,
0178: GrammarAST block, boolean fragment) {
0179: GrammarAST modifier = null;
0180: if (fragment) {
0181: modifier = (GrammarAST) astFactory.create(FRAGMENT,
0182: "fragment");
0183: }
0184: GrammarAST EORAST = (GrammarAST) astFactory.create(EOR,
0185: "<end-of-rule>");
0186: GrammarAST EOBAST = block.getLastChild();
0187: EORAST.setLine(EOBAST.getLine());
0188: EORAST.setColumn(EOBAST.getColumn());
0189: GrammarAST ruleAST = (GrammarAST) astFactory
0190: .make((new ASTArray(8)).add(
0191: (GrammarAST) astFactory.create(RULE, "rule"))
0192: .add((GrammarAST) astFactory.create(ID, name))
0193: .add(modifier).add(
0194: (GrammarAST) astFactory.create(ARG,
0195: "ARG")).add(
0196: (GrammarAST) astFactory.create(RET,
0197: "RET")).add(
0198: (GrammarAST) astFactory.create(SCOPE,
0199: "scope")).add(block)
0200: .add(EORAST));
0201: ruleAST.setLine(block.getLine());
0202: ruleAST.setColumn(block.getColumn());
0203: return ruleAST;
0204: }
0205:
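/** Route recognition errors through ErrorManager so they are reported
 * against the current grammar and lookahead token.
 */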
0206: public void reportError(RecognitionException ex) {
0207: Token token = null;
0208: try {
0209: token = LT(1);
0210: } catch (TokenStreamException tse) {
0211: ErrorManager.internalError("can't get token???", tse);
0212: }
0213: ErrorManager.syntaxError(ErrorManager.MSG_SYNTAX_ERROR,
0214: grammar, token, "antlr: " + ex.toString(), ex);
0215: }
0216:
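/** For lexer grammars, add the artificial Tokens rule referencing all
 * collected lexer rule names (honoring the filter option).
 */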
0217: public void cleanup(GrammarAST root) {
0218: if (gtype == LEXER_GRAMMAR) {
0219: String filter = (String) grammar.getOption("filter");
0220: GrammarAST tokensRuleAST = grammar
0221: .addArtificialMatchTokensRule(root, lexerRuleNames,
0222: filter != null && filter.equals("true"));
0223: }
0224: }
0225:
0226: protected ANTLRParser(TokenBuffer tokenBuf, int k) {
0227: super(tokenBuf, k);
0228: tokenNames = _tokenNames;
0229: buildTokenTypeASTClassMap();
0230: astFactory = new ASTFactory(getTokenTypeToASTClassMap());
0231: }
0232:
0233: public ANTLRParser(TokenBuffer tokenBuf) {
0234: this(tokenBuf, 2);
0235: }
0236:
0237: protected ANTLRParser(TokenStream lexer, int k) {
0238: super(lexer, k);
0239: tokenNames = _tokenNames;
0240: buildTokenTypeASTClassMap();
0241: astFactory = new ASTFactory(getTokenTypeToASTClassMap());
0242: }
0243:
0244: public ANTLRParser(TokenStream lexer) {
0245: this(lexer, 2);
0246: }
0247:
0248: public ANTLRParser(ParserSharedInputState state) {
0249: super(state, 2);
0250: tokenNames = _tokenNames;
0251: buildTokenTypeASTClassMap();
0252: astFactory = new ASTFactory(getTokenTypeToASTClassMap());
0253: }
0254:
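/** grammar : (ACTION)? (DOC_COMMENT)? grammarType id ';'
 *           (optionsSpec)? (tokensSpec)? attrScopes (actions)? rules EOF
 */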
0255: public final void grammar(Grammar g) throws RecognitionException,
0256: TokenStreamException {
0257:
0258: returnAST = null;
0259: ASTPair currentAST = new ASTPair();
0260: GrammarAST grammar_AST = null;
0261: Token cmt = null;
0262: GrammarAST cmt_AST = null;
0263: GrammarAST gr_AST = null;
0264: GrammarAST gid_AST = null;
0265: GrammarAST ts_AST = null;
0266: GrammarAST scopes_AST = null;
0267: GrammarAST a_AST = null;
0268: GrammarAST r_AST = null;
0269:
0270: this.grammar = g;
0271: GrammarAST opt = null;
0272: Token optionsStartToken = null;
0273: Map opts;
0274:
0275: try { // for error handling
0276: {
0277: switch (LA(1)) {
0278: case ACTION: {
0279: GrammarAST tmp1_AST = null;
0280: tmp1_AST = (GrammarAST) astFactory.create(LT(1));
0281: match(ACTION);
0282: break;
0283: }
0284: case PARSER:
0285: case DOC_COMMENT:
0286: case LITERAL_lexer:
0287: case LITERAL_tree:
0288: case LITERAL_grammar: {
0289: break;
0290: }
0291: default: {
0292: throw new NoViableAltException(LT(1), getFilename());
0293: }
0294: }
0295: }
0296: {
0297: switch (LA(1)) {
0298: case DOC_COMMENT: {
0299: cmt = LT(1);
0300: cmt_AST = (GrammarAST) astFactory.create(cmt);
0301: match(DOC_COMMENT);
0302: break;
0303: }
0304: case PARSER:
0305: case LITERAL_lexer:
0306: case LITERAL_tree:
0307: case LITERAL_grammar: {
0308: break;
0309: }
0310: default: {
0311: throw new NoViableAltException(LT(1), getFilename());
0312: }
0313: }
0314: }
0315: grammarType();
0316: gr_AST = (GrammarAST) returnAST;
0317: id();
0318: gid_AST = (GrammarAST) returnAST;
0319: GrammarAST tmp2_AST = null;
0320: tmp2_AST = (GrammarAST) astFactory.create(LT(1));
0321: match(SEMI);
0322: {
0323: switch (LA(1)) {
0324: case OPTIONS: {
0325: optionsStartToken = LT(1);
0326: opts = optionsSpec();
0327: grammar.setOptions(opts, optionsStartToken);
0328: opt = (GrammarAST) returnAST;
0329: break;
0330: }
0331: case TOKENS:
0332: case SCOPE:
0333: case FRAGMENT:
0334: case DOC_COMMENT:
0335: case AMPERSAND:
0336: case TOKEN_REF:
0337: case LITERAL_protected:
0338: case LITERAL_public:
0339: case LITERAL_private:
0340: case RULE_REF: {
0341: break;
0342: }
0343: default: {
0344: throw new NoViableAltException(LT(1), getFilename());
0345: }
0346: }
0347: }
0348: {
0349: switch (LA(1)) {
0350: case TOKENS: {
0351: tokensSpec();
0352: ts_AST = (GrammarAST) returnAST;
0353: break;
0354: }
0355: case SCOPE:
0356: case FRAGMENT:
0357: case DOC_COMMENT:
0358: case AMPERSAND:
0359: case TOKEN_REF:
0360: case LITERAL_protected:
0361: case LITERAL_public:
0362: case LITERAL_private:
0363: case RULE_REF: {
0364: break;
0365: }
0366: default: {
0367: throw new NoViableAltException(LT(1), getFilename());
0368: }
0369: }
0370: }
0371: attrScopes();
0372: scopes_AST = (GrammarAST) returnAST;
0373: {
0374: switch (LA(1)) {
0375: case AMPERSAND: {
0376: actions();
0377: a_AST = (GrammarAST) returnAST;
0378: break;
0379: }
0380: case FRAGMENT:
0381: case DOC_COMMENT:
0382: case TOKEN_REF:
0383: case LITERAL_protected:
0384: case LITERAL_public:
0385: case LITERAL_private:
0386: case RULE_REF: {
0387: break;
0388: }
0389: default: {
0390: throw new NoViableAltException(LT(1), getFilename());
0391: }
0392: }
0393: }
0394: rules();
0395: r_AST = (GrammarAST) returnAST;
0396: GrammarAST tmp3_AST = null;
0397: tmp3_AST = (GrammarAST) astFactory.create(LT(1));
0398: match(Token.EOF_TYPE);
0399: grammar_AST = (GrammarAST) currentAST.root;
0400:
0401: grammar_AST = (GrammarAST) astFactory
0402: .make((new ASTArray(2)).add(null).add(
0403: (GrammarAST) astFactory.make((new ASTArray(
0404: 8)).add(gr_AST).add(gid_AST).add(
0405: cmt_AST).add(opt).add(ts_AST).add(
0406: scopes_AST).add(a_AST).add(r_AST))));
0407: cleanup(grammar_AST);
0408:
0409: currentAST.root = grammar_AST;
0410: currentAST.child = grammar_AST != null
0411: && grammar_AST.getFirstChild() != null ? grammar_AST
0412: .getFirstChild()
0413: : grammar_AST;
0414: currentAST.advanceChildToEnd();
0415: } catch (RecognitionException ex) {
0416: reportError(ex);
0417: recover(ex, _tokenSet_0);
0418: }
0419: returnAST = grammar_AST;
0420: }
0421:
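/** Match the grammar header: ('lexer' | 'parser' | 'tree')? 'grammar',
 * setting gtype accordingly.
 */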
0422: public final void grammarType() throws RecognitionException,
0423: TokenStreamException {
0424:
0425: returnAST = null;
0426: ASTPair currentAST = new ASTPair();
0427: GrammarAST grammarType_AST = null;
0428: Token gr = null;
0429: GrammarAST gr_AST = null;
0430:
0431: try { // for error handling
0432: {
0433: switch (LA(1)) {
0434: case LITERAL_lexer: {
0435: match(LITERAL_lexer);
0436: gtype = LEXER_GRAMMAR;
0437: break;
0438: }
0439: case PARSER: {
0440: match(PARSER);
0441: gtype = PARSER_GRAMMAR;
0442: break;
0443: }
0444: case LITERAL_tree: {
0445: match(LITERAL_tree);
0446: gtype = TREE_GRAMMAR;
0447: break;
0448: }
0449: case LITERAL_grammar: {
0450: gtype = COMBINED_GRAMMAR;
0451: break;
0452: }
0453: default: {
0454: throw new NoViableAltException(LT(1), getFilename());
0455: }
0456: }
0457: }
0458: gr = LT(1);
0459: gr_AST = (GrammarAST) astFactory.create(gr);
0460: astFactory.addASTChild(currentAST, gr_AST);
0461: match(LITERAL_grammar);
0462: gr_AST.setType(gtype);
0463: grammarType_AST = (GrammarAST) currentAST.root;
0464: } catch (RecognitionException ex) {
0465: reportError(ex);
0466: recover(ex, _tokenSet_1);
0467: }
0468: returnAST = grammarType_AST;
0469: }
0470:
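/** An id is a TOKEN_REF or RULE_REF; the resulting node is retyped to ID. */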
0471: public final void id() throws RecognitionException,
0472: TokenStreamException {
0473:
0474: returnAST = null;
0475: ASTPair currentAST = new ASTPair();
0476: GrammarAST id_AST = null;
0477:
0478: try { // for error handling
0479: switch (LA(1)) {
0480: case TOKEN_REF: {
0481: GrammarAST tmp7_AST = null;
0482: tmp7_AST = (GrammarAST) astFactory.create(LT(1));
0483: astFactory.addASTChild(currentAST, tmp7_AST);
0484: match(TOKEN_REF);
0485: id_AST = (GrammarAST) currentAST.root;
0486: id_AST.setType(ID);
0487: id_AST = (GrammarAST) currentAST.root;
0488: break;
0489: }
0490: case RULE_REF: {
0491: GrammarAST tmp8_AST = null;
0492: tmp8_AST = (GrammarAST) astFactory.create(LT(1));
0493: astFactory.addASTChild(currentAST, tmp8_AST);
0494: match(RULE_REF);
0495: id_AST = (GrammarAST) currentAST.root;
0496: id_AST.setType(ID);
0497: id_AST = (GrammarAST) currentAST.root;
0498: break;
0499: }
0500: default: {
0501: throw new NoViableAltException(LT(1), getFilename());
0502: }
0503: }
0504: } catch (RecognitionException ex) {
0505: reportError(ex);
0506: recover(ex, _tokenSet_2);
0507: }
0508: returnAST = id_AST;
0509: }
0510:
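/** Match OPTIONS (option ';')+ '}' and return the collected key/value map. */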
0511: public final Map optionsSpec() throws RecognitionException,
0512: TokenStreamException {
0513: Map opts = new HashMap();
0514:
0515: returnAST = null;
0516: ASTPair currentAST = new ASTPair();
0517: GrammarAST optionsSpec_AST = null;
0518:
0519: try { // for error handling
0520: GrammarAST tmp9_AST = null;
0521: tmp9_AST = (GrammarAST) astFactory.create(LT(1));
0522: astFactory.makeASTRoot(currentAST, tmp9_AST);
0523: match(OPTIONS);
0524: {
0525: int _cnt17 = 0;
0526: _loop17: do {
0527: if ((LA(1) == TOKEN_REF || LA(1) == RULE_REF)) {
0528: option(opts);
0529: astFactory.addASTChild(currentAST, returnAST);
0530: match(SEMI);
0531: } else {
0532: if (_cnt17 >= 1) {
0533: break _loop17;
0534: } else {
0535: throw new NoViableAltException(LT(1),
0536: getFilename());
0537: }
0538: }
0539:
0540: _cnt17++;
0541: } while (true);
0542: }
0543: match(RCURLY);
0544: optionsSpec_AST = (GrammarAST) currentAST.root;
0545: } catch (RecognitionException ex) {
0546: reportError(ex);
0547: recover(ex, _tokenSet_3);
0548: }
0549: returnAST = optionsSpec_AST;
0550: return opts;
0551: }
0552:
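/** Match TOKENS (tokenSpec)+ '}', the tokens {...} section. */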
0553: public final void tokensSpec() throws RecognitionException,
0554: TokenStreamException {
0555:
0556: returnAST = null;
0557: ASTPair currentAST = new ASTPair();
0558: GrammarAST tokensSpec_AST = null;
0559:
0560: try { // for error handling
0561: GrammarAST tmp12_AST = null;
0562: tmp12_AST = (GrammarAST) astFactory.create(LT(1));
0563: astFactory.makeASTRoot(currentAST, tmp12_AST);
0564: match(TOKENS);
0565: {
0566: int _cnt22 = 0;
0567: _loop22: do {
0568: if ((LA(1) == TOKEN_REF)) {
0569: tokenSpec();
0570: astFactory.addASTChild(currentAST, returnAST);
0571: } else {
0572: if (_cnt22 >= 1) {
0573: break _loop22;
0574: } else {
0575: throw new NoViableAltException(LT(1),
0576: getFilename());
0577: }
0578: }
0579:
0580: _cnt22++;
0581: } while (true);
0582: }
0583: match(RCURLY);
0584: tokensSpec_AST = (GrammarAST) currentAST.root;
0585: } catch (RecognitionException ex) {
0586: reportError(ex);
0587: recover(ex, _tokenSet_4);
0588: }
0589: returnAST = tokensSpec_AST;
0590: }
0591:
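/** Match zero or more global attribute scope definitions. */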
0592: public final void attrScopes() throws RecognitionException,
0593: TokenStreamException {
0594:
0595: returnAST = null;
0596: ASTPair currentAST = new ASTPair();
0597: GrammarAST attrScopes_AST = null;
0598:
0599: try { // for error handling
0600: {
0601: _loop28: do {
0602: if ((LA(1) == SCOPE)) {
0603: attrScope();
0604: astFactory.addASTChild(currentAST, returnAST);
0605: } else {
0606: break _loop28;
0607: }
0608:
0609: } while (true);
0610: }
0611: attrScopes_AST = (GrammarAST) currentAST.root;
0612: } catch (RecognitionException ex) {
0613: reportError(ex);
0614: recover(ex, _tokenSet_5);
0615: }
0616: returnAST = attrScopes_AST;
0617: }
0618:
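/** Match one or more grammar-level @... actions. */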
0619: public final void actions() throws RecognitionException,
0620: TokenStreamException {
0621:
0622: returnAST = null;
0623: ASTPair currentAST = new ASTPair();
0624: GrammarAST actions_AST = null;
0625:
0626: try { // for error handling
0627: {
0628: int _cnt11 = 0;
0629: _loop11: do {
0630: if ((LA(1) == AMPERSAND)) {
0631: action();
0632: astFactory.addASTChild(currentAST, returnAST);
0633: } else {
0634: if (_cnt11 >= 1) {
0635: break _loop11;
0636: } else {
0637: throw new NoViableAltException(LT(1),
0638: getFilename());
0639: }
0640: }
0641:
0642: _cnt11++;
0643: } while (true);
0644: }
0645: actions_AST = (GrammarAST) currentAST.root;
0646: } catch (RecognitionException ex) {
0647: reportError(ex);
0648: recover(ex, _tokenSet_6);
0649: }
0650: returnAST = actions_AST;
0651: }
0652:
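/** Match one or more rule definitions. */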
0653: public final void rules() throws RecognitionException,
0654: TokenStreamException {
0655:
0656: returnAST = null;
0657: ASTPair currentAST = new ASTPair();
0658: GrammarAST rules_AST = null;
0659:
0660: try { // for error handling
0661: {
0662: int _cnt32 = 0;
0663: _loop32: do {
0664: if ((_tokenSet_6.member(LA(1)))) {
0665: rule();
0666: astFactory.addASTChild(currentAST, returnAST);
0667: } else {
0668: if (_cnt32 >= 1) {
0669: break _loop32;
0670: } else {
0671: throw new NoViableAltException(LT(1),
0672: getFilename());
0673: }
0674: }
0675:
0676: _cnt32++;
0677: } while (true);
0678: }
0679: rules_AST = (GrammarAST) currentAST.root;
0680: } catch (RecognitionException ex) {
0681: reportError(ex);
0682: recover(ex, _tokenSet_0);
0683: }
0684: returnAST = rules_AST;
0685: }
0686:
0687: /** Match stuff like @parser::members {int i;} */
0688: public final void action() throws RecognitionException,
0689: TokenStreamException {
0690:
0691: returnAST = null;
0692: ASTPair currentAST = new ASTPair();
0693: GrammarAST action_AST = null;
0694:
0695: try { // for error handling
0696: GrammarAST tmp14_AST = null;
0697: tmp14_AST = (GrammarAST) astFactory.create(LT(1));
0698: astFactory.makeASTRoot(currentAST, tmp14_AST);
0699: match(AMPERSAND);
0700: {
0701: if ((_tokenSet_7.member(LA(1))) && (LA(2) == COLON)) {
0702: actionScopeName();
0703: astFactory.addASTChild(currentAST, returnAST);
0704: match(COLON);
0705: match(COLON);
0706: } else if ((LA(1) == TOKEN_REF || LA(1) == RULE_REF)
0707: && (LA(2) == ACTION)) {
0708: } else {
0709: throw new NoViableAltException(LT(1), getFilename());
0710: }
0711:
0712: }
0713: id();
0714: astFactory.addASTChild(currentAST, returnAST);
0715: GrammarAST tmp17_AST = null;
0716: tmp17_AST = (GrammarAST) astFactory.create(LT(1));
0717: astFactory.addASTChild(currentAST, tmp17_AST);
0718: match(ACTION);
0719: action_AST = (GrammarAST) currentAST.root;
0720: } catch (RecognitionException ex) {
0721: reportError(ex);
0722: recover(ex, _tokenSet_5);
0723: }
0724: returnAST = action_AST;
0725: }
0726:
0727: /** Sometimes the scope names will collide with keywords; allow them as
0728: * ids for action scopes.
0729: */
0730: public final void actionScopeName() throws RecognitionException,
0731: TokenStreamException {
0732:
0733: returnAST = null;
0734: ASTPair currentAST = new ASTPair();
0735: GrammarAST actionScopeName_AST = null;
0736: Token l = null;
0737: GrammarAST l_AST = null;
0738: Token p = null;
0739: GrammarAST p_AST = null;
0740:
0741: try { // for error handling
0742: switch (LA(1)) {
0743: case TOKEN_REF:
0744: case RULE_REF: {
0745: id();
0746: astFactory.addASTChild(currentAST, returnAST);
0747: actionScopeName_AST = (GrammarAST) currentAST.root;
0748: break;
0749: }
0750: case LITERAL_lexer: {
0751: l = LT(1);
0752: l_AST = (GrammarAST) astFactory.create(l);
0753: astFactory.addASTChild(currentAST, l_AST);
0754: match(LITERAL_lexer);
0755: l_AST.setType(ID);
0756: actionScopeName_AST = (GrammarAST) currentAST.root;
0757: break;
0758: }
0759: case PARSER: {
0760: p = LT(1);
0761: p_AST = (GrammarAST) astFactory.create(p);
0762: astFactory.addASTChild(currentAST, p_AST);
0763: match(PARSER);
0764: p_AST.setType(ID);
0765: actionScopeName_AST = (GrammarAST) currentAST.root;
0766: break;
0767: }
0768: default: {
0769: throw new NoViableAltException(LT(1), getFilename());
0770: }
0771: }
0772: } catch (RecognitionException ex) {
0773: reportError(ex);
0774: recover(ex, _tokenSet_8);
0775: }
0776: returnAST = actionScopeName_AST;
0777: }
0778:
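/** Match id '=' optionValue and record the pair in opts. */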
0779: public final void option(Map opts) throws RecognitionException,
0780: TokenStreamException {
0781:
0782: returnAST = null;
0783: ASTPair currentAST = new ASTPair();
0784: GrammarAST option_AST = null;
0785: GrammarAST o_AST = null;
0786:
0787: Object value = null;
0788:
0789: try { // for error handling
0790: id();
0791: o_AST = (GrammarAST) returnAST;
0792: astFactory.addASTChild(currentAST, returnAST);
0793: GrammarAST tmp18_AST = null;
0794: tmp18_AST = (GrammarAST) astFactory.create(LT(1));
0795: astFactory.makeASTRoot(currentAST, tmp18_AST);
0796: match(ASSIGN);
0797: value = optionValue();
0798: astFactory.addASTChild(currentAST, returnAST);
0799:
0800: opts.put(o_AST.getText(), value);
0801:
0802: option_AST = (GrammarAST) currentAST.root;
0803: } catch (RecognitionException ex) {
0804: reportError(ex);
0805: recover(ex, _tokenSet_9);
0806: }
0807: returnAST = option_AST;
0808: }
0809:
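/** An option value is an id, a string or char literal (quotes stripped),
 * an integer, or '*'; returned as a String or Integer.
 */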
0810: public final Object optionValue() throws RecognitionException,
0811: TokenStreamException {
0812: Object value = null;
0813:
0814: returnAST = null;
0815: ASTPair currentAST = new ASTPair();
0816: GrammarAST optionValue_AST = null;
0817: GrammarAST x_AST = null;
0818: Token s = null;
0819: GrammarAST s_AST = null;
0820: Token c = null;
0821: GrammarAST c_AST = null;
0822: Token i = null;
0823: GrammarAST i_AST = null;
0824: Token ss = null;
0825: GrammarAST ss_AST = null;
0826:
0827: try { // for error handling
0828: switch (LA(1)) {
0829: case TOKEN_REF:
0830: case RULE_REF: {
0831: id();
0832: x_AST = (GrammarAST) returnAST;
0833: astFactory.addASTChild(currentAST, returnAST);
0834: value = x_AST.getText();
0835: optionValue_AST = (GrammarAST) currentAST.root;
0836: break;
0837: }
0838: case STRING_LITERAL: {
0839: s = LT(1);
0840: s_AST = (GrammarAST) astFactory.create(s);
0841: astFactory.addASTChild(currentAST, s_AST);
0842: match(STRING_LITERAL);
0843: String vs = s_AST.getText();
0844: value = vs.substring(1, vs.length() - 1);
0845: optionValue_AST = (GrammarAST) currentAST.root;
0846: break;
0847: }
0848: case CHAR_LITERAL: {
0849: c = LT(1);
0850: c_AST = (GrammarAST) astFactory.create(c);
0851: astFactory.addASTChild(currentAST, c_AST);
0852: match(CHAR_LITERAL);
0853: String vs = c_AST.getText();
0854: value = vs.substring(1, vs.length() - 1);
0855: optionValue_AST = (GrammarAST) currentAST.root;
0856: break;
0857: }
0858: case INT: {
0859: i = LT(1);
0860: i_AST = (GrammarAST) astFactory.create(i);
0861: astFactory.addASTChild(currentAST, i_AST);
0862: match(INT);
0863: value = new Integer(i_AST.getText());
0864: optionValue_AST = (GrammarAST) currentAST.root;
0865: break;
0866: }
0867: case STAR: {
0868: ss = LT(1);
0869: ss_AST = (GrammarAST) astFactory.create(ss);
0870: astFactory.addASTChild(currentAST, ss_AST);
0871: match(STAR);
0872: ss_AST.setType(STRING_LITERAL);
0873: value = "*";
0874: optionValue_AST = (GrammarAST) currentAST.root;
0875: break;
0876: }
0877: default: {
0878: throw new NoViableAltException(LT(1), getFilename());
0879: }
0880: }
0881: } catch (RecognitionException ex) {
0882: reportError(ex);
0883: recover(ex, _tokenSet_9);
0884: }
0885: returnAST = optionValue_AST;
0886: return value;
0887: }
0888:
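/** Match TOKEN_REF ('=' (STRING_LITERAL | CHAR_LITERAL))? ';'. */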
0889: public final void tokenSpec() throws RecognitionException,
0890: TokenStreamException {
0891:
0892: returnAST = null;
0893: ASTPair currentAST = new ASTPair();
0894: GrammarAST tokenSpec_AST = null;
0895:
0896: try { // for error handling
0897: GrammarAST tmp19_AST = null;
0898: tmp19_AST = (GrammarAST) astFactory.create(LT(1));
0899: astFactory.addASTChild(currentAST, tmp19_AST);
0900: match(TOKEN_REF);
0901: {
0902: switch (LA(1)) {
0903: case ASSIGN: {
0904: GrammarAST tmp20_AST = null;
0905: tmp20_AST = (GrammarAST) astFactory.create(LT(1));
0906: astFactory.makeASTRoot(currentAST, tmp20_AST);
0907: match(ASSIGN);
0908: {
0909: switch (LA(1)) {
0910: case STRING_LITERAL: {
0911: GrammarAST tmp21_AST = null;
0912: tmp21_AST = (GrammarAST) astFactory
0913: .create(LT(1));
0914: astFactory.addASTChild(currentAST,
0915: tmp21_AST);
0916: match(STRING_LITERAL);
0917: break;
0918: }
0919: case CHAR_LITERAL: {
0920: GrammarAST tmp22_AST = null;
0921: tmp22_AST = (GrammarAST) astFactory
0922: .create(LT(1));
0923: astFactory.addASTChild(currentAST,
0924: tmp22_AST);
0925: match(CHAR_LITERAL);
0926: break;
0927: }
0928: default: {
0929: throw new NoViableAltException(LT(1),
0930: getFilename());
0931: }
0932: }
0933: }
0934: break;
0935: }
0936: case SEMI: {
0937: break;
0938: }
0939: default: {
0940: throw new NoViableAltException(LT(1), getFilename());
0941: }
0942: }
0943: }
0944: match(SEMI);
0945: tokenSpec_AST = (GrammarAST) currentAST.root;
0946: } catch (RecognitionException ex) {
0947: reportError(ex);
0948: recover(ex, _tokenSet_10);
0949: }
0950: returnAST = tokenSpec_AST;
0951: }
0952:
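/** Match 'scope' id ACTION, defining a named attribute scope. */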
0953: public final void attrScope() throws RecognitionException,
0954: TokenStreamException {
0955:
0956: returnAST = null;
0957: ASTPair currentAST = new ASTPair();
0958: GrammarAST attrScope_AST = null;
0959:
0960: try { // for error handling
0961: GrammarAST tmp24_AST = null;
0962: tmp24_AST = (GrammarAST) astFactory.create(LT(1));
0963: astFactory.makeASTRoot(currentAST, tmp24_AST);
0964: match(SCOPE);
0965: id();
0966: astFactory.addASTChild(currentAST, returnAST);
0967: GrammarAST tmp25_AST = null;
0968: tmp25_AST = (GrammarAST) astFactory.create(LT(1));
0969: astFactory.addASTChild(currentAST, tmp25_AST);
0970: match(ACTION);
0971: attrScope_AST = (GrammarAST) currentAST.root;
0972: } catch (RecognitionException ex) {
0973: reportError(ex);
0974: recover(ex, _tokenSet_4);
0975: }
0976: returnAST = attrScope_AST;
0977: }
0978:
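/** Match a complete rule definition and build its
 * #(RULE id modifier ARG RET ...) subtree, recording the token range
 * the rule spans.
 */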
0979: public final void rule() throws RecognitionException,
0980: TokenStreamException {
0981:
0982: returnAST = null;
0983: ASTPair currentAST = new ASTPair();
0984: GrammarAST rule_AST = null;
0985: Token d = null;
0986: GrammarAST d_AST = null;
0987: Token p1 = null;
0988: GrammarAST p1_AST = null;
0989: Token p2 = null;
0990: GrammarAST p2_AST = null;
0991: Token p3 = null;
0992: GrammarAST p3_AST = null;
0993: Token p4 = null;
0994: GrammarAST p4_AST = null;
0995: GrammarAST ruleName_AST = null;
0996: Token aa = null;
0997: GrammarAST aa_AST = null;
0998: Token rt = null;
0999: GrammarAST rt_AST = null;
1000: GrammarAST scopes_AST = null;
1001: GrammarAST a_AST = null;
1002: Token colon = null;
1003: GrammarAST colon_AST = null;
1004: GrammarAST b_AST = null;
1005: Token semi = null;
1006: GrammarAST semi_AST = null;
1007: GrammarAST ex_AST = null;
1008:
1009: GrammarAST modifier = null, blk = null, blkRoot = null, eob = null;
1010: int start = ((TokenWithIndex) LT(1)).getIndex();
1011: int startLine = LT(1).getLine();
1012: GrammarAST opt = null;
1013: Map opts = null;
1014:
1015: try { // for error handling
1016: {
1017: switch (LA(1)) {
1018: case DOC_COMMENT: {
1019: d = LT(1);
1020: d_AST = (GrammarAST) astFactory.create(d);
1021: match(DOC_COMMENT);
1022: break;
1023: }
1024: case FRAGMENT:
1025: case TOKEN_REF:
1026: case LITERAL_protected:
1027: case LITERAL_public:
1028: case LITERAL_private:
1029: case RULE_REF: {
1030: break;
1031: }
1032: default: {
1033: throw new NoViableAltException(LT(1), getFilename());
1034: }
1035: }
1036: }
1037: {
1038: switch (LA(1)) {
1039: case LITERAL_protected: {
1040: p1 = LT(1);
1041: p1_AST = (GrammarAST) astFactory.create(p1);
1042: match(LITERAL_protected);
1043: modifier = p1_AST;
1044: break;
1045: }
1046: case LITERAL_public: {
1047: p2 = LT(1);
1048: p2_AST = (GrammarAST) astFactory.create(p2);
1049: match(LITERAL_public);
1050: modifier = p2_AST;
1051: break;
1052: }
1053: case LITERAL_private: {
1054: p3 = LT(1);
1055: p3_AST = (GrammarAST) astFactory.create(p3);
1056: match(LITERAL_private);
1057: modifier = p3_AST;
1058: break;
1059: }
1060: case FRAGMENT: {
1061: p4 = LT(1);
1062: p4_AST = (GrammarAST) astFactory.create(p4);
1063: match(FRAGMENT);
1064: modifier = p4_AST;
1065: break;
1066: }
1067: case TOKEN_REF:
1068: case RULE_REF: {
1069: break;
1070: }
1071: default: {
1072: throw new NoViableAltException(LT(1), getFilename());
1073: }
1074: }
1075: }
1076: id();
1077: ruleName_AST = (GrammarAST) returnAST;
1078: currentRuleName = ruleName_AST.getText();
1079: if (gtype == LEXER_GRAMMAR && p4_AST == null) {
1080: lexerRuleNames.add(currentRuleName);
1081: }
1082:
1083: {
1084: switch (LA(1)) {
1085: case BANG: {
1086: GrammarAST tmp26_AST = null;
1087: tmp26_AST = (GrammarAST) astFactory.create(LT(1));
1088: match(BANG);
1089: break;
1090: }
1091: case OPTIONS:
1092: case SCOPE:
1093: case AMPERSAND:
1094: case COLON:
1095: case ARG_ACTION:
1096: case LITERAL_returns:
1097: case LITERAL_throws: {
1098: break;
1099: }
1100: default: {
1101: throw new NoViableAltException(LT(1), getFilename());
1102: }
1103: }
1104: }
1105: {
1106: switch (LA(1)) {
1107: case ARG_ACTION: {
1108: aa = LT(1);
1109: aa_AST = (GrammarAST) astFactory.create(aa);
1110: match(ARG_ACTION);
1111: break;
1112: }
1113: case OPTIONS:
1114: case SCOPE:
1115: case AMPERSAND:
1116: case COLON:
1117: case LITERAL_returns:
1118: case LITERAL_throws: {
1119: break;
1120: }
1121: default: {
1122: throw new NoViableAltException(LT(1), getFilename());
1123: }
1124: }
1125: }
1126: {
1127: switch (LA(1)) {
1128: case LITERAL_returns: {
1129: match(LITERAL_returns);
1130: rt = LT(1);
1131: rt_AST = (GrammarAST) astFactory.create(rt);
1132: match(ARG_ACTION);
1133: break;
1134: }
1135: case OPTIONS:
1136: case SCOPE:
1137: case AMPERSAND:
1138: case COLON:
1139: case LITERAL_throws: {
1140: break;
1141: }
1142: default: {
1143: throw new NoViableAltException(LT(1), getFilename());
1144: }
1145: }
1146: }
1147: {
1148: switch (LA(1)) {
1149: case LITERAL_throws: {
1150: throwsSpec();
1151: break;
1152: }
1153: case OPTIONS:
1154: case SCOPE:
1155: case AMPERSAND:
1156: case COLON: {
1157: break;
1158: }
1159: default: {
1160: throw new NoViableAltException(LT(1), getFilename());
1161: }
1162: }
1163: }
1164: {
1165: switch (LA(1)) {
1166: case OPTIONS: {
1167: opts = optionsSpec();
1168: opt = (GrammarAST) returnAST;
1169: break;
1170: }
1171: case SCOPE:
1172: case AMPERSAND:
1173: case COLON: {
1174: break;
1175: }
1176: default: {
1177: throw new NoViableAltException(LT(1), getFilename());
1178: }
1179: }
1180: }
1181: ruleScopeSpec();
1182: scopes_AST = (GrammarAST) returnAST;
1183: {
1184: switch (LA(1)) {
1185: case AMPERSAND: {
1186: ruleActions();
1187: a_AST = (GrammarAST) returnAST;
1188: break;
1189: }
1190: case COLON: {
1191: break;
1192: }
1193: default: {
1194: throw new NoViableAltException(LT(1), getFilename());
1195: }
1196: }
1197: }
1198: colon = LT(1);
1199: colon_AST = (GrammarAST) astFactory.create(colon);
1200: match(COLON);
1201:
1202: blkRoot = (GrammarAST) astFactory.create(BLOCK, "BLOCK");
1203: blkRoot.options = opts;
1204: blkRoot.setLine(colon.getLine());
1205: blkRoot.setColumn(colon.getColumn());
1206: eob = (GrammarAST) astFactory.create(EOB, "<end-of-block>");
1207:
1208: altList(opts);
1209: b_AST = (GrammarAST) returnAST;
1210: blk = b_AST;
1211: semi = LT(1);
1212: semi_AST = (GrammarAST) astFactory.create(semi);
1213: match(SEMI);
1214: {
1215: switch (LA(1)) {
1216: case LITERAL_catch:
1217: case LITERAL_finally: {
1218: exceptionGroup();
1219: ex_AST = (GrammarAST) returnAST;
1220: break;
1221: }
1222: case EOF:
1223: case FRAGMENT:
1224: case DOC_COMMENT:
1225: case TOKEN_REF:
1226: case LITERAL_protected:
1227: case LITERAL_public:
1228: case LITERAL_private:
1229: case RULE_REF: {
1230: break;
1231: }
1232: default: {
1233: throw new NoViableAltException(LT(1), getFilename());
1234: }
1235: }
1236: }
1237: rule_AST = (GrammarAST) currentAST.root;
1238:
1239: int stop = ((TokenWithIndex) LT(1)).getIndex() - 1; // point at the semi or exception thingie
1240: eob.setLine(semi.getLine());
1241: eob.setColumn(semi.getColumn());
1242: GrammarAST eor = (GrammarAST) astFactory.create(EOR,
1243: "<end-of-rule>");
1244: eor.setEnclosingRule(ruleName_AST.getText());
1245: eor.setLine(semi.getLine());
1246: eor.setColumn(semi.getColumn());
1247: GrammarAST root = (GrammarAST) astFactory.create(RULE,
1248: "rule");
1249: root.ruleStartTokenIndex = start;
1250: root.ruleStopTokenIndex = stop;
1251: root.setLine(startLine);
1252: root.options = opts;
1253: rule_AST = (GrammarAST) astFactory.make((new ASTArray(11))
1254: .add(root).add(ruleName_AST).add(modifier).add(
1255: (GrammarAST) astFactory.make((new ASTArray(
1256: 2)).add(
1257: (GrammarAST) astFactory.create(ARG,
1258: "ARG")).add(aa_AST))).add(
1259: (GrammarAST) astFactory.make((new ASTArray(
1260: 2)).add(
1261: (GrammarAST) astFactory.create(RET,
1262: "RET")).add(rt_AST))).add(
1263: opt).add(scopes_AST).add(a_AST).add(blk)
1264: .add(ex_AST).add(eor));
1265: currentRuleName = null;
1266:
1267: currentAST.root = rule_AST;
1268: currentAST.child = rule_AST != null
1269: && rule_AST.getFirstChild() != null ? rule_AST
1270: .getFirstChild() : rule_AST;
1271: currentAST.advanceChildToEnd();
1272: } catch (RecognitionException ex) {
1273: reportError(ex);
1274: recover(ex, _tokenSet_11);
1275: }
1276: returnAST = rule_AST;
1277: }
1278:
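/** Match 'throws' id (',' id)*. */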
1279: public final void throwsSpec() throws RecognitionException,
1280: TokenStreamException {
1281:
1282: returnAST = null;
1283: ASTPair currentAST = new ASTPair();
1284: GrammarAST throwsSpec_AST = null;
1285:
1286: try { // for error handling
1287: GrammarAST tmp28_AST = null;
1288: tmp28_AST = (GrammarAST) astFactory.create(LT(1));
1289: astFactory.addASTChild(currentAST, tmp28_AST);
1290: match(LITERAL_throws);
1291: id();
1292: astFactory.addASTChild(currentAST, returnAST);
1293: {
1294: _loop49: do {
1295: if ((LA(1) == COMMA)) {
1296: GrammarAST tmp29_AST = null;
1297: tmp29_AST = (GrammarAST) astFactory
1298: .create(LT(1));
1299: astFactory.addASTChild(currentAST, tmp29_AST);
1300: match(COMMA);
1301: id();
1302: astFactory.addASTChild(currentAST, returnAST);
1303: } else {
1304: break _loop49;
1305: }
1306:
1307: } while (true);
1308: }
1309: throwsSpec_AST = (GrammarAST) currentAST.root;
1310: } catch (RecognitionException ex) {
1311: reportError(ex);
1312: recover(ex, _tokenSet_12);
1313: }
1314: returnAST = throwsSpec_AST;
1315: }
1316:
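/** Match ('scope' ACTION)? ('scope' idList ';')* and hang the results off
 * an artificial 'scope' root node.
 */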
1317: public final void ruleScopeSpec() throws RecognitionException,
1318: TokenStreamException {
1319:
1320: returnAST = null;
1321: ASTPair currentAST = new ASTPair();
1322: GrammarAST ruleScopeSpec_AST = null;
1323: Token a = null;
1324: GrammarAST a_AST = null;
1325: GrammarAST ids_AST = null;
1326:
1327: int line = LT(1).getLine();
1328: int column = LT(1).getColumn();
1329:
1330: try { // for error handling
1331: {
1332: if ((LA(1) == SCOPE) && (LA(2) == ACTION)) {
1333: match(SCOPE);
1334: a = LT(1);
1335: a_AST = (GrammarAST) astFactory.create(a);
1336: match(ACTION);
1337: } else if ((LA(1) == SCOPE || LA(1) == AMPERSAND || LA(1) == COLON)
1338: && (_tokenSet_13.member(LA(2)))) {
1339: } else {
1340: throw new NoViableAltException(LT(1), getFilename());
1341: }
1342:
1343: }
1344: {
1345: _loop53: do {
1346: if ((LA(1) == SCOPE)) {
1347: match(SCOPE);
1348: idList();
1349: ids_AST = (GrammarAST) returnAST;
1350: match(SEMI);
1351: } else {
1352: break _loop53;
1353: }
1354:
1355: } while (true);
1356: }
1357: ruleScopeSpec_AST = (GrammarAST) currentAST.root;
1358:
1359: GrammarAST scopeRoot = (GrammarAST) astFactory
1360: .create(SCOPE, "scope");
1361: scopeRoot.setLine(line);
1362: scopeRoot.setColumn(column);
1363: ruleScopeSpec_AST = (GrammarAST) astFactory
1364: .make((new ASTArray(3)).add(scopeRoot).add(a_AST)
1365: .add(ids_AST));
1366:
1367: currentAST.root = ruleScopeSpec_AST;
1368: currentAST.child = ruleScopeSpec_AST != null
1369: && ruleScopeSpec_AST.getFirstChild() != null ? ruleScopeSpec_AST
1370: .getFirstChild()
1371: : ruleScopeSpec_AST;
1372: currentAST.advanceChildToEnd();
1373: } catch (RecognitionException ex) {
1374: reportError(ex);
1375: recover(ex, _tokenSet_14);
1376: }
1377: returnAST = ruleScopeSpec_AST;
1378: }
1379:
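/** Match one or more @... actions attached to a rule or block. */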
1380: public final void ruleActions() throws RecognitionException,
1381: TokenStreamException {
1382:
1383: returnAST = null;
1384: ASTPair currentAST = new ASTPair();
1385: GrammarAST ruleActions_AST = null;
1386:
1387: try { // for error handling
1388: {
1389: int _cnt45 = 0;
1390: _loop45: do {
1391: if ((LA(1) == AMPERSAND)) {
1392: ruleAction();
1393: astFactory.addASTChild(currentAST, returnAST);
1394: } else {
1395: if (_cnt45 >= 1) {
1396: break _loop45;
1397: } else {
1398: throw new NoViableAltException(LT(1),
1399: getFilename());
1400: }
1401: }
1402:
1403: _cnt45++;
1404: } while (true);
1405: }
1406: ruleActions_AST = (GrammarAST) currentAST.root;
1407: } catch (RecognitionException ex) {
1408: reportError(ex);
1409: recover(ex, _tokenSet_8);
1410: }
1411: returnAST = ruleActions_AST;
1412: }
1413:
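/** Match alternatives separated by '|' and wrap them as
 * #(BLOCK alt+ EOB), adding synpreds where auto-backtracking calls for them.
 */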
1414: public final void altList(Map opts) throws RecognitionException,
1415: TokenStreamException {
1416:
1417: returnAST = null;
1418: ASTPair currentAST = new ASTPair();
1419: GrammarAST altList_AST = null;
1420: GrammarAST a1_AST = null;
1421: GrammarAST a2_AST = null;
1422:
1423: GrammarAST blkRoot = (GrammarAST) astFactory.create(BLOCK,
1424: "BLOCK");
1425: blkRoot.options = opts;
1426: blkRoot.setLine(LT(0).getLine()); // set to : or (
1427: blkRoot.setColumn(LT(0).getColumn());
1428: GrammarAST save = currentBlockAST;
1429: currentBlockAST = blkRoot;
1430:
1431: try { // for error handling
1432: alternative();
1433: a1_AST = (GrammarAST) returnAST;
1434: astFactory.addASTChild(currentAST, returnAST);
1435: rewrite();
1436: astFactory.addASTChild(currentAST, returnAST);
1437: if (LA(1) == OR
1438: || (LA(2) == QUESTION || LA(2) == PLUS || LA(2) == STAR))
1439: prefixWithSynPred(a1_AST);
1440: {
1441: _loop62: do {
1442: if ((LA(1) == OR)) {
1443: match(OR);
1444: alternative();
1445: a2_AST = (GrammarAST) returnAST;
1446: astFactory.addASTChild(currentAST, returnAST);
1447: rewrite();
1448: astFactory.addASTChild(currentAST, returnAST);
1449: if (LA(1) == OR
1450: || (LA(2) == QUESTION || LA(2) == PLUS || LA(2) == STAR))
1451: prefixWithSynPred(a2_AST);
1452: } else {
1453: break _loop62;
1454: }
1455:
1456: } while (true);
1457: }
1458: altList_AST = (GrammarAST) currentAST.root;
1459:
1460: altList_AST = (GrammarAST) astFactory
1461: .make((new ASTArray(3)).add(blkRoot).add(
1462: altList_AST).add(
1463: (GrammarAST) astFactory.create(EOB,
1464: "<end-of-block>")));
1465: currentBlockAST = save;
1466:
1467: currentAST.root = altList_AST;
1468: currentAST.child = altList_AST != null
1469: && altList_AST.getFirstChild() != null ? altList_AST
1470: .getFirstChild()
1471: : altList_AST;
1472: currentAST.advanceChildToEnd();
1473: altList_AST = (GrammarAST) currentAST.root;
1474: } catch (RecognitionException ex) {
1475: reportError(ex);
1476: recover(ex, _tokenSet_9);
1477: }
1478: returnAST = altList_AST;
1479: }
1480:
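/** Match a rule's exception handlers: (catch ...)+ with an optional
 * finally clause, or a finally clause alone.
 */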
1481: public final void exceptionGroup() throws RecognitionException,
1482: TokenStreamException {
1483:
1484: returnAST = null;
1485: ASTPair currentAST = new ASTPair();
1486: GrammarAST exceptionGroup_AST = null;
1487:
1488: try { // for error handling
1489: switch (LA(1)) {
1490: case LITERAL_catch: {
1491: {
1492: int _cnt68 = 0;
1493: _loop68: do {
1494: if ((LA(1) == LITERAL_catch)) {
1495: exceptionHandler();
1496: astFactory.addASTChild(currentAST,
1497: returnAST);
1498: } else {
1499: if (_cnt68 >= 1) {
1500: break _loop68;
1501: } else {
1502: throw new NoViableAltException(LT(1),
1503: getFilename());
1504: }
1505: }
1506:
1507: _cnt68++;
1508: } while (true);
1509: }
1510: {
1511: switch (LA(1)) {
1512: case LITERAL_finally: {
1513: finallyClause();
1514: astFactory.addASTChild(currentAST, returnAST);
1515: break;
1516: }
1517: case EOF:
1518: case FRAGMENT:
1519: case DOC_COMMENT:
1520: case TOKEN_REF:
1521: case LITERAL_protected:
1522: case LITERAL_public:
1523: case LITERAL_private:
1524: case RULE_REF: {
1525: break;
1526: }
1527: default: {
1528: throw new NoViableAltException(LT(1),
1529: getFilename());
1530: }
1531: }
1532: }
1533: exceptionGroup_AST = (GrammarAST) currentAST.root;
1534: break;
1535: }
1536: case LITERAL_finally: {
1537: finallyClause();
1538: astFactory.addASTChild(currentAST, returnAST);
1539: exceptionGroup_AST = (GrammarAST) currentAST.root;
1540: break;
1541: }
1542: default: {
1543: throw new NoViableAltException(LT(1), getFilename());
1544: }
1545: }
1546: } catch (RecognitionException ex) {
1547: reportError(ex);
1548: recover(ex, _tokenSet_11);
1549: }
1550: returnAST = exceptionGroup_AST;
1551: }
1552:
1553: /** Match stuff like @init {int i;} */
1554: public final void ruleAction() throws RecognitionException,
1555: TokenStreamException {
1556:
1557: returnAST = null;
1558: ASTPair currentAST = new ASTPair();
1559: GrammarAST ruleAction_AST = null;
1560:
1561: try { // for error handling
1562: GrammarAST tmp34_AST = null;
1563: tmp34_AST = (GrammarAST) astFactory.create(LT(1));
1564: astFactory.makeASTRoot(currentAST, tmp34_AST);
1565: match(AMPERSAND);
1566: id();
1567: astFactory.addASTChild(currentAST, returnAST);
1568: GrammarAST tmp35_AST = null;
1569: tmp35_AST = (GrammarAST) astFactory.create(LT(1));
1570: astFactory.addASTChild(currentAST, tmp35_AST);
1571: match(ACTION);
1572: ruleAction_AST = (GrammarAST) currentAST.root;
1573: } catch (RecognitionException ex) {
1574: reportError(ex);
1575: recover(ex, _tokenSet_14);
1576: }
1577: returnAST = ruleAction_AST;
1578: }
1579:
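/** Match one or more ids (used for scope lists). */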
1580: public final void idList() throws RecognitionException,
1581: TokenStreamException {
1582:
1583: returnAST = null;
1584: ASTPair currentAST = new ASTPair();
1585: GrammarAST idList_AST = null;
1586:
1587: try { // for error handling
1588: {
1589: int _cnt103 = 0;
1590: _loop103: do {
1591: if ((LA(1) == TOKEN_REF || LA(1) == RULE_REF)) {
1592: id();
1593: astFactory.addASTChild(currentAST, returnAST);
1594: } else {
1595: if (_cnt103 >= 1) {
1596: break _loop103;
1597: } else {
1598: throw new NoViableAltException(LT(1),
1599: getFilename());
1600: }
1601: }
1602:
1603: _cnt103++;
1604: } while (true);
1605: }
1606: idList_AST = (GrammarAST) currentAST.root;
1607: } catch (RecognitionException ex) {
1608: reportError(ex);
1609: recover(ex, _tokenSet_9);
1610: }
1611: returnAST = idList_AST;
1612: }
1613:
1614: /** Build #(BLOCK ( #(ALT ...) EOB )+ ) */
1615: public final void block() throws RecognitionException,
1616: TokenStreamException {
1617:
1618: returnAST = null;
1619: ASTPair currentAST = new ASTPair();
1620: GrammarAST block_AST = null;
1621: Token lp = null;
1622: GrammarAST lp_AST = null;
1623: GrammarAST a1_AST = null;
1624: GrammarAST a2_AST = null;
1625: Token rp = null;
1626: GrammarAST rp_AST = null;
1627:
1628: GrammarAST save = currentBlockAST;
1629: Map opts = null;
1630:
1631: try { // for error handling
1632: lp = LT(1);
1633: lp_AST = (GrammarAST) astFactory.create(lp);
1634: astFactory.makeASTRoot(currentAST, lp_AST);
1635: match(LPAREN);
1636: lp_AST.setType(BLOCK);
1637: lp_AST.setText("BLOCK");
1638: {
1639: if ((LA(1) == OPTIONS || LA(1) == AMPERSAND || LA(1) == COLON)) {
1640: {
1641: switch (LA(1)) {
1642: case OPTIONS: {
1643: opts = optionsSpec();
1644: astFactory.addASTChild(currentAST,
1645: returnAST);
1646: block_AST = (GrammarAST) currentAST.root;
1647: block_AST.setOptions(grammar, opts);
1648: break;
1649: }
1650: case AMPERSAND:
1651: case COLON: {
1652: break;
1653: }
1654: default: {
1655: throw new NoViableAltException(LT(1),
1656: getFilename());
1657: }
1658: }
1659: }
1660: {
1661: switch (LA(1)) {
1662: case AMPERSAND: {
1663: ruleActions();
1664: astFactory.addASTChild(currentAST,
1665: returnAST);
1666: break;
1667: }
1668: case COLON: {
1669: break;
1670: }
1671: default: {
1672: throw new NoViableAltException(LT(1),
1673: getFilename());
1674: }
1675: }
1676: }
1677: match(COLON);
1678: } else if ((LA(1) == ACTION) && (LA(2) == COLON)) {
1679: GrammarAST tmp37_AST = null;
1680: tmp37_AST = (GrammarAST) astFactory.create(LT(1));
1681: astFactory.addASTChild(currentAST, tmp37_AST);
1682: match(ACTION);
1683: match(COLON);
1684: } else if ((_tokenSet_15.member(LA(1)))
1685: && (_tokenSet_16.member(LA(2)))) {
1686: } else {
1687: throw new NoViableAltException(LT(1), getFilename());
1688: }
1689:
1690: }
1691: currentBlockAST = lp_AST;
1692: alternative();
1693: a1_AST = (GrammarAST) returnAST;
1694: astFactory.addASTChild(currentAST, returnAST);
1695: rewrite();
1696: astFactory.addASTChild(currentAST, returnAST);
1697: if (LA(1) == OR
1698: || (LA(2) == QUESTION || LA(2) == PLUS || LA(2) == STAR))
1699: prefixWithSynPred(a1_AST);
1700: {
1701: _loop59: do {
1702: if ((LA(1) == OR)) {
1703: match(OR);
1704: alternative();
1705: a2_AST = (GrammarAST) returnAST;
1706: astFactory.addASTChild(currentAST, returnAST);
1707: rewrite();
1708: astFactory.addASTChild(currentAST, returnAST);
1709: if (LA(1) == OR
1710: || (LA(2) == QUESTION || LA(2) == PLUS || LA(2) == STAR))
1711: prefixWithSynPred(a2_AST);
1712: } else {
1713: break _loop59;
1714: }
1715:
1716: } while (true);
1717: }
1718: rp = LT(1);
1719: rp_AST = (GrammarAST) astFactory.create(rp);
1720: match(RPAREN);
1721: block_AST = (GrammarAST) currentAST.root;
1722:
1723: currentBlockAST = save;
1724: GrammarAST eob = (GrammarAST) astFactory.create(EOB,
1725: "<end-of-block>");
1726: eob.setLine(rp.getLine());
1727: eob.setColumn(rp.getColumn());
1728: block_AST.addChild(eob);
1729:
1730: block_AST = (GrammarAST) currentAST.root;
1731: } catch (RecognitionException ex) {
1732: reportError(ex);
1733: recover(ex, _tokenSet_17);
1734: }
1735: returnAST = block_AST;
1736: }
1737:
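/** Match the elements of one alternative and build #(ALT element+ EOA),
 * substituting an epsilon node when the alternative is empty.
 */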
1738: public final void alternative() throws RecognitionException,
1739: TokenStreamException {
1740:
1741: returnAST = null;
1742: ASTPair currentAST = new ASTPair();
1743: GrammarAST alternative_AST = null;
1744: GrammarAST el_AST = null;
1745:
1746: GrammarAST eoa = (GrammarAST) astFactory.create(EOA,
1747: "<end-of-alt>");
1748: GrammarAST altRoot = (GrammarAST) astFactory.create(ALT, "ALT");
1749: altRoot.setLine(LT(1).getLine());
1750: altRoot.setColumn(LT(1).getColumn());
1751:
1752: try { // for error handling
1753: switch (LA(1)) {
1754: case ACTION:
1755: case STRING_LITERAL:
1756: case CHAR_LITERAL:
1757: case TOKEN_REF:
1758: case LPAREN:
1759: case SEMPRED:
1760: case RULE_REF:
1761: case NOT:
1762: case TREE_BEGIN:
1763: case WILDCARD: {
1764: {
1765: int _cnt65 = 0;
1766: _loop65: do {
1767: if ((_tokenSet_18.member(LA(1)))) {
1768: element();
1769: el_AST = (GrammarAST) returnAST;
1770: astFactory.addASTChild(currentAST,
1771: returnAST);
1772: } else {
1773: if (_cnt65 >= 1) {
1774: break _loop65;
1775: } else {
1776: throw new NoViableAltException(LT(1),
1777: getFilename());
1778: }
1779: }
1780:
1781: _cnt65++;
1782: } while (true);
1783: }
1784: alternative_AST = (GrammarAST) currentAST.root;
1785:
1786: if (alternative_AST == null) {
1787: alternative_AST = (GrammarAST) astFactory
1788: .make((new ASTArray(3)).add(altRoot).add(
1789: (GrammarAST) astFactory.create(
1790: EPSILON, "epsilon")).add(
1791: eoa));
1792: } else {
1793: // we have a real list of stuff
1794: alternative_AST = (GrammarAST) astFactory
1795: .make((new ASTArray(3)).add(altRoot).add(
1796: alternative_AST).add(eoa));
1797: }
1798:
1799: currentAST.root = alternative_AST;
1800: currentAST.child = alternative_AST != null
1801: && alternative_AST.getFirstChild() != null ? alternative_AST
1802: .getFirstChild()
1803: : alternative_AST;
1804: currentAST.advanceChildToEnd();
1805: alternative_AST = (GrammarAST) currentAST.root;
1806: break;
1807: }
1808: case SEMI:
1809: case OR:
1810: case RPAREN:
1811: case REWRITE: {
1812: alternative_AST = (GrammarAST) currentAST.root;
1813:
1814: GrammarAST eps = (GrammarAST) astFactory.create(
1815: EPSILON, "epsilon");
1816: eps.setLine(LT(0).getLine()); // get line/col of '|' or ':' (prev token)
1817: eps.setColumn(LT(0).getColumn());
1818: alternative_AST = (GrammarAST) astFactory
1819: .make((new ASTArray(3)).add(altRoot).add(eps)
1820: .add(eoa));
1821:
1822: currentAST.root = alternative_AST;
1823: currentAST.child = alternative_AST != null
1824: && alternative_AST.getFirstChild() != null ? alternative_AST
1825: .getFirstChild()
1826: : alternative_AST;
1827: currentAST.advanceChildToEnd();
1828: alternative_AST = (GrammarAST) currentAST.root;
1829: break;
1830: }
1831: default: {
1832: throw new NoViableAltException(LT(1), getFilename());
1833: }
1834: }
1835: } catch (RecognitionException ex) {
1836: reportError(ex);
1837: recover(ex, _tokenSet_19);
1838: }
1839: returnAST = alternative_AST;
1840: }
1841:
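/** Match an optional rewrite section: zero or more predicated
 * '->' {pred}? alternatives followed by a final '->' alternative.
 */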
1842: public final void rewrite() throws RecognitionException,
1843: TokenStreamException {
1844:
1845: returnAST = null;
1846: ASTPair currentAST = new ASTPair();
1847: GrammarAST rewrite_AST = null;
1848: Token rew = null;
1849: GrammarAST rew_AST = null;
1850: Token pred = null;
1851: GrammarAST pred_AST = null;
1852: GrammarAST alt_AST = null;
1853: Token rew2 = null;
1854: GrammarAST rew2_AST = null;
1855: GrammarAST alt2_AST = null;
1856:
1857: GrammarAST root = new GrammarAST();
1858:
1859: try { // for error handling
1860: switch (LA(1)) {
1861: case REWRITE: {
1862: {
1863: _loop108: do {
1864: if ((LA(1) == REWRITE) && (LA(2) == SEMPRED)) {
1865: rew = LT(1);
1866: rew_AST = (GrammarAST) astFactory
1867: .create(rew);
1868: match(REWRITE);
1869: pred = LT(1);
1870: pred_AST = (GrammarAST) astFactory
1871: .create(pred);
1872: match(SEMPRED);
1873: rewrite_alternative();
1874: alt_AST = (GrammarAST) returnAST;
1875: root.addChild((GrammarAST) astFactory
1876: .make((new ASTArray(3))
1877: .add(rew_AST).add(pred_AST)
1878: .add(alt_AST)));
1879:
1880: pred_AST.setEnclosingRule(currentRuleName);
1881: rew_AST.setEnclosingRule(currentRuleName);
1882:
1883: } else {
1884: break _loop108;
1885: }
1886:
1887: } while (true);
1888: }
1889: rew2 = LT(1);
1890: rew2_AST = (GrammarAST) astFactory.create(rew2);
1891: match(REWRITE);
1892: rewrite_alternative();
1893: alt2_AST = (GrammarAST) returnAST;
1894: rewrite_AST = (GrammarAST) currentAST.root;
1895:
1896: root.addChild((GrammarAST) astFactory
1897: .make((new ASTArray(2)).add(rew2_AST).add(
1898: alt2_AST)));
1899: rewrite_AST = (GrammarAST) root.getFirstChild();
1900:
1901: currentAST.root = rewrite_AST;
1902: currentAST.child = rewrite_AST != null
1903: && rewrite_AST.getFirstChild() != null ? rewrite_AST
1904: .getFirstChild()
1905: : rewrite_AST;
1906: currentAST.advanceChildToEnd();
1907: break;
1908: }
1909: case SEMI:
1910: case OR:
1911: case RPAREN: {
1912: rewrite_AST = (GrammarAST) currentAST.root;
1913: break;
1914: }
1915: default: {
1916: throw new NoViableAltException(LT(1), getFilename());
1917: }
1918: }
1919: } catch (RecognitionException ex) {
1920: reportError(ex);
1921: recover(ex, _tokenSet_20);
1922: }
1923: returnAST = rewrite_AST;
1924: }
1925:
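/** A single element of an alternative; delegates to elementNoOptionSpec. */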
1926: public final void element() throws RecognitionException,
1927: TokenStreamException {
1928:
1929: returnAST = null;
1930: ASTPair currentAST = new ASTPair();
1931: GrammarAST element_AST = null;
1932:
1933: try { // for error handling
1934: elementNoOptionSpec();
1935: astFactory.addASTChild(currentAST, returnAST);
1936: element_AST = (GrammarAST) currentAST.root;
1937: } catch (RecognitionException ex) {
1938: reportError(ex);
1939: recover(ex, _tokenSet_21);
1940: }
1941: returnAST = element_AST;
1942: }
1943:
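/** Match 'catch' ARG_ACTION ACTION. */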
1944: public final void exceptionHandler() throws RecognitionException,
1945: TokenStreamException {
1946:
1947: returnAST = null;
1948: ASTPair currentAST = new ASTPair();
1949: GrammarAST exceptionHandler_AST = null;
1950:
1951: try { // for error handling
1952: GrammarAST tmp40_AST = null;
1953: tmp40_AST = (GrammarAST) astFactory.create(LT(1));
1954: astFactory.makeASTRoot(currentAST, tmp40_AST);
1955: match(LITERAL_catch);
1956: GrammarAST tmp41_AST = null;
1957: tmp41_AST = (GrammarAST) astFactory.create(LT(1));
1958: astFactory.addASTChild(currentAST, tmp41_AST);
1959: match(ARG_ACTION);
1960: GrammarAST tmp42_AST = null;
1961: tmp42_AST = (GrammarAST) astFactory.create(LT(1));
1962: astFactory.addASTChild(currentAST, tmp42_AST);
1963: match(ACTION);
1964: exceptionHandler_AST = (GrammarAST) currentAST.root;
1965: } catch (RecognitionException ex) {
1966: reportError(ex);
1967: recover(ex, _tokenSet_22);
1968: }
1969: returnAST = exceptionHandler_AST;
1970: }
1971:
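/** Match 'finally' ACTION. */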
1972: public final void finallyClause() throws RecognitionException,
1973: TokenStreamException {
1974:
1975: returnAST = null;
1976: ASTPair currentAST = new ASTPair();
1977: GrammarAST finallyClause_AST = null;
1978:
1979: try { // for error handling
1980: GrammarAST tmp43_AST = null;
1981: tmp43_AST = (GrammarAST) astFactory.create(LT(1));
1982: astFactory.makeASTRoot(currentAST, tmp43_AST);
1983: match(LITERAL_finally);
1984: GrammarAST tmp44_AST = null;
1985: tmp44_AST = (GrammarAST) astFactory.create(LT(1));
1986: astFactory.addASTChild(currentAST, tmp44_AST);
1987: match(ACTION);
1988: finallyClause_AST = (GrammarAST) currentAST.root;
1989: } catch (RecognitionException ex) {
1990: reportError(ex);
1991: recover(ex, _tokenSet_11);
1992: }
1993: returnAST = finallyClause_AST;
1994: }
1995:
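/** Match one element: a subrule, an action, a semantic predicate, a tree
 * pattern, or a (possibly labeled) atom or block, optionally followed by
 * an EBNF suffix.
 */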
1996: public final void elementNoOptionSpec()
1997: throws RecognitionException, TokenStreamException {
1998:
1999: returnAST = null;
2000: ASTPair currentAST = new ASTPair();
2001: GrammarAST elementNoOptionSpec_AST = null;
2002: Token p = null;
2003: GrammarAST p_AST = null;
2004: GrammarAST t3_AST = null;
2005:
2006: IntSet elements = null;
2007: GrammarAST sub, sub2;
2008:
2009: try { // for error handling
2010: switch (LA(1)) {
2011: case LPAREN: {
2012: ebnf();
2013: astFactory.addASTChild(currentAST, returnAST);
2014: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2015: break;
2016: }
2017: case ACTION: {
2018: GrammarAST tmp45_AST = null;
2019: tmp45_AST = (GrammarAST) astFactory.create(LT(1));
2020: astFactory.addASTChild(currentAST, tmp45_AST);
2021: match(ACTION);
2022: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2023: break;
2024: }
2025: case SEMPRED: {
2026: p = LT(1);
2027: p_AST = (GrammarAST) astFactory.create(p);
2028: astFactory.addASTChild(currentAST, p_AST);
2029: match(SEMPRED);
2030: {
2031: switch (LA(1)) {
2032: case IMPLIES: {
2033: match(IMPLIES);
2034: p_AST.setType(GATED_SEMPRED);
2035: break;
2036: }
2037: case ACTION:
2038: case SEMI:
2039: case STRING_LITERAL:
2040: case CHAR_LITERAL:
2041: case TOKEN_REF:
2042: case LPAREN:
2043: case OR:
2044: case RPAREN:
2045: case SEMPRED:
2046: case RULE_REF:
2047: case NOT:
2048: case TREE_BEGIN:
2049: case WILDCARD:
2050: case REWRITE: {
2051: break;
2052: }
2053: default: {
2054: throw new NoViableAltException(LT(1),
2055: getFilename());
2056: }
2057: }
2058: }
2059:
2060: p_AST.setEnclosingRule(currentRuleName);
2061: grammar.blocksWithSemPreds.add(currentBlockAST);
2062:
2063: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2064: break;
2065: }
2066: case TREE_BEGIN: {
2067: tree();
2068: t3_AST = (GrammarAST) returnAST;
2069: astFactory.addASTChild(currentAST, returnAST);
2070: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2071: break;
2072: }
2073: default:
2074: if ((LA(1) == TOKEN_REF || LA(1) == RULE_REF)
2075: && (LA(2) == ASSIGN || LA(2) == PLUS_ASSIGN)) {
2076: id();
2077: astFactory.addASTChild(currentAST, returnAST);
2078: {
2079: switch (LA(1)) {
2080: case ASSIGN: {
2081: GrammarAST tmp47_AST = null;
2082: tmp47_AST = (GrammarAST) astFactory
2083: .create(LT(1));
2084: astFactory.makeASTRoot(currentAST,
2085: tmp47_AST);
2086: match(ASSIGN);
2087: break;
2088: }
2089: case PLUS_ASSIGN: {
2090: GrammarAST tmp48_AST = null;
2091: tmp48_AST = (GrammarAST) astFactory
2092: .create(LT(1));
2093: astFactory.makeASTRoot(currentAST,
2094: tmp48_AST);
2095: match(PLUS_ASSIGN);
2096: break;
2097: }
2098: default: {
2099: throw new NoViableAltException(LT(1),
2100: getFilename());
2101: }
2102: }
2103: }
2104: {
2105: switch (LA(1)) {
2106: case STRING_LITERAL:
2107: case CHAR_LITERAL:
2108: case TOKEN_REF:
2109: case RULE_REF:
2110: case NOT:
2111: case WILDCARD: {
2112: atom();
2113: astFactory.addASTChild(currentAST,
2114: returnAST);
2115: break;
2116: }
2117: case LPAREN: {
2118: block();
2119: astFactory.addASTChild(currentAST,
2120: returnAST);
2121: break;
2122: }
2123: default: {
2124: throw new NoViableAltException(LT(1),
2125: getFilename());
2126: }
2127: }
2128: }
2129: {
2130: switch (LA(1)) {
2131: case STAR:
2132: case QUESTION:
2133: case PLUS: {
2134: sub = ebnfSuffix(
2135: (GrammarAST) currentAST.root, false);
2136: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2137: elementNoOptionSpec_AST = sub;
2138: currentAST.root = elementNoOptionSpec_AST;
2139: currentAST.child = elementNoOptionSpec_AST != null
2140: && elementNoOptionSpec_AST
2141: .getFirstChild() != null ? elementNoOptionSpec_AST
2142: .getFirstChild()
2143: : elementNoOptionSpec_AST;
2144: currentAST.advanceChildToEnd();
2145: break;
2146: }
2147: case ACTION:
2148: case SEMI:
2149: case STRING_LITERAL:
2150: case CHAR_LITERAL:
2151: case TOKEN_REF:
2152: case LPAREN:
2153: case OR:
2154: case RPAREN:
2155: case SEMPRED:
2156: case RULE_REF:
2157: case NOT:
2158: case TREE_BEGIN:
2159: case WILDCARD:
2160: case REWRITE: {
2161: break;
2162: }
2163: default: {
2164: throw new NoViableAltException(LT(1),
2165: getFilename());
2166: }
2167: }
2168: }
2169: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2170: } else if ((_tokenSet_23.member(LA(1)))
2171: && (_tokenSet_24.member(LA(2)))) {
2172: atom();
2173: astFactory.addASTChild(currentAST, returnAST);
2174: {
2175: switch (LA(1)) {
2176: case STAR:
2177: case QUESTION:
2178: case PLUS: {
2179: sub2 = ebnfSuffix(
2180: (GrammarAST) currentAST.root, false);
2181: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2182: elementNoOptionSpec_AST = sub2;
2183: currentAST.root = elementNoOptionSpec_AST;
2184: currentAST.child = elementNoOptionSpec_AST != null
2185: && elementNoOptionSpec_AST
2186: .getFirstChild() != null ? elementNoOptionSpec_AST
2187: .getFirstChild()
2188: : elementNoOptionSpec_AST;
2189: currentAST.advanceChildToEnd();
2190: break;
2191: }
2192: case ACTION:
2193: case SEMI:
2194: case STRING_LITERAL:
2195: case CHAR_LITERAL:
2196: case TOKEN_REF:
2197: case LPAREN:
2198: case OR:
2199: case RPAREN:
2200: case SEMPRED:
2201: case RULE_REF:
2202: case NOT:
2203: case TREE_BEGIN:
2204: case WILDCARD:
2205: case REWRITE: {
2206: break;
2207: }
2208: default: {
2209: throw new NoViableAltException(LT(1),
2210: getFilename());
2211: }
2212: }
2213: }
2214: elementNoOptionSpec_AST = (GrammarAST) currentAST.root;
2215: } else {
2216: throw new NoViableAltException(LT(1), getFilename());
2217: }
2218: }
2219: } catch (RecognitionException ex) {
2220: reportError(ex);
2221: recover(ex, _tokenSet_21);
2222: }
2223: returnAST = elementNoOptionSpec_AST;
2224: }
2225:
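/** Match one atom of an alternative: a set complement (~x), a rule
 *  reference with an optional argument action, a character range, or a
 *  terminal; the first three may carry a trailing ROOT ('^') or BANG
 *  ('!') tree operator, while terminals handle theirs in terminal().
 */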
2226: public final void atom() throws RecognitionException,
2227: TokenStreamException {
2228:
2229: returnAST = null;
2230: ASTPair currentAST = new ASTPair();
2231: GrammarAST atom_AST = null;
2232: Token rr = null;
2233: GrammarAST rr_AST = null;
2234:
2235: try { // for error handling
2236: switch (LA(1)) {
2237: case NOT: {
2238: notSet();
2239: astFactory.addASTChild(currentAST, returnAST);
2240: {
2241: switch (LA(1)) {
2242: case ROOT: {
2243: GrammarAST tmp49_AST = null;
2244: tmp49_AST = (GrammarAST) astFactory
2245: .create(LT(1));
2246: astFactory.makeASTRoot(currentAST, tmp49_AST);
2247: match(ROOT);
2248: break;
2249: }
2250: case BANG: {
2251: GrammarAST tmp50_AST = null;
2252: tmp50_AST = (GrammarAST) astFactory
2253: .create(LT(1));
2254: astFactory.makeASTRoot(currentAST, tmp50_AST);
2255: match(BANG);
2256: break;
2257: }
2258: case ACTION:
2259: case SEMI:
2260: case STRING_LITERAL:
2261: case CHAR_LITERAL:
2262: case STAR:
2263: case TOKEN_REF:
2264: case LPAREN:
2265: case OR:
2266: case RPAREN:
2267: case SEMPRED:
2268: case RULE_REF:
2269: case NOT:
2270: case TREE_BEGIN:
2271: case QUESTION:
2272: case PLUS:
2273: case WILDCARD:
2274: case REWRITE: {
2275: break;
2276: }
2277: default: {
2278: throw new NoViableAltException(LT(1),
2279: getFilename());
2280: }
2281: }
2282: }
2283: atom_AST = (GrammarAST) currentAST.root;
2284: break;
2285: }
2286: case RULE_REF: {
2287: rr = LT(1);
2288: rr_AST = (GrammarAST) astFactory.create(rr);
2289: astFactory.makeASTRoot(currentAST, rr_AST);
2290: match(RULE_REF);
2291: {
2292: switch (LA(1)) {
2293: case ARG_ACTION: {
2294: GrammarAST tmp51_AST = null;
2295: tmp51_AST = (GrammarAST) astFactory
2296: .create(LT(1));
2297: astFactory.addASTChild(currentAST, tmp51_AST);
2298: match(ARG_ACTION);
2299: break;
2300: }
2301: case ACTION:
2302: case SEMI:
2303: case STRING_LITERAL:
2304: case CHAR_LITERAL:
2305: case STAR:
2306: case TOKEN_REF:
2307: case BANG:
2308: case LPAREN:
2309: case OR:
2310: case RPAREN:
2311: case SEMPRED:
2312: case ROOT:
2313: case RULE_REF:
2314: case NOT:
2315: case TREE_BEGIN:
2316: case QUESTION:
2317: case PLUS:
2318: case WILDCARD:
2319: case REWRITE: {
2320: break;
2321: }
2322: default: {
2323: throw new NoViableAltException(LT(1),
2324: getFilename());
2325: }
2326: }
2327: }
2328: {
2329: switch (LA(1)) {
2330: case ROOT: {
2331: GrammarAST tmp52_AST = null;
2332: tmp52_AST = (GrammarAST) astFactory
2333: .create(LT(1));
2334: astFactory.makeASTRoot(currentAST, tmp52_AST);
2335: match(ROOT);
2336: break;
2337: }
2338: case BANG: {
2339: GrammarAST tmp53_AST = null;
2340: tmp53_AST = (GrammarAST) astFactory
2341: .create(LT(1));
2342: astFactory.makeASTRoot(currentAST, tmp53_AST);
2343: match(BANG);
2344: break;
2345: }
2346: case ACTION:
2347: case SEMI:
2348: case STRING_LITERAL:
2349: case CHAR_LITERAL:
2350: case STAR:
2351: case TOKEN_REF:
2352: case LPAREN:
2353: case OR:
2354: case RPAREN:
2355: case SEMPRED:
2356: case RULE_REF:
2357: case NOT:
2358: case TREE_BEGIN:
2359: case QUESTION:
2360: case PLUS:
2361: case WILDCARD:
2362: case REWRITE: {
2363: break;
2364: }
2365: default: {
2366: throw new NoViableAltException(LT(1),
2367: getFilename());
2368: }
2369: }
2370: }
2371: atom_AST = (GrammarAST) currentAST.root;
2372: break;
2373: }
2374: default:
2375: if ((LA(1) == CHAR_LITERAL) && (LA(2) == RANGE)) {
2376: range();
2377: astFactory.addASTChild(currentAST, returnAST);
2378: {
2379: switch (LA(1)) {
2380: case ROOT: {
2381: GrammarAST tmp54_AST = null;
2382: tmp54_AST = (GrammarAST) astFactory
2383: .create(LT(1));
2384: astFactory.makeASTRoot(currentAST,
2385: tmp54_AST);
2386: match(ROOT);
2387: break;
2388: }
2389: case BANG: {
2390: GrammarAST tmp55_AST = null;
2391: tmp55_AST = (GrammarAST) astFactory
2392: .create(LT(1));
2393: astFactory.makeASTRoot(currentAST,
2394: tmp55_AST);
2395: match(BANG);
2396: break;
2397: }
2398: case ACTION:
2399: case SEMI:
2400: case STRING_LITERAL:
2401: case CHAR_LITERAL:
2402: case STAR:
2403: case TOKEN_REF:
2404: case LPAREN:
2405: case OR:
2406: case RPAREN:
2407: case SEMPRED:
2408: case RULE_REF:
2409: case NOT:
2410: case TREE_BEGIN:
2411: case QUESTION:
2412: case PLUS:
2413: case WILDCARD:
2414: case REWRITE: {
2415: break;
2416: }
2417: default: {
2418: throw new NoViableAltException(LT(1),
2419: getFilename());
2420: }
2421: }
2422: }
2423: atom_AST = (GrammarAST) currentAST.root;
2424: } else if ((_tokenSet_25.member(LA(1)))
2425: && (_tokenSet_26.member(LA(2)))) {
2426: terminal();
2427: astFactory.addASTChild(currentAST, returnAST);
2428: atom_AST = (GrammarAST) currentAST.root;
2429: } else {
2430: throw new NoViableAltException(LT(1), getFilename());
2431: }
2432: }
2433: } catch (RecognitionException ex) {
2434: reportError(ex);
2435: recover(ex, _tokenSet_27);
2436: }
2437: returnAST = atom_AST;
2438: }
2439:
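/** Wrap an element in a subrule for an EBNF suffix ('?', '*', or '+').
 *  Roughly, for "x+" this builds
 *  ^(POSITIVE_CLOSURE ^(BLOCK ^(ALT x <end-of-alt>) <end-of-block>))
 *  and, unless called from a rewrite, the ALT is first passed to
 *  prefixWithSynPred().
 */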
2440: public final GrammarAST ebnfSuffix(GrammarAST elemAST,
2441: boolean inRewrite) throws RecognitionException,
2442: TokenStreamException {
2443: GrammarAST subrule = null;
2444:
2445: returnAST = null;
2446: ASTPair currentAST = new ASTPair();
2447: GrammarAST ebnfSuffix_AST = null;
2448:
2449: GrammarAST ebnfRoot = null;
2450:
2451: try { // for error handling
2452: {
2453: switch (LA(1)) {
2454: case QUESTION: {
2455: GrammarAST tmp56_AST = null;
2456: tmp56_AST = (GrammarAST) astFactory.create(LT(1));
2457: match(QUESTION);
2458: ebnfRoot = (GrammarAST) astFactory.create(OPTIONAL,
2459: "?");
2460: break;
2461: }
2462: case STAR: {
2463: GrammarAST tmp57_AST = null;
2464: tmp57_AST = (GrammarAST) astFactory.create(LT(1));
2465: match(STAR);
2466: ebnfRoot = (GrammarAST) astFactory.create(CLOSURE,
2467: "*");
2468: break;
2469: }
2470: case PLUS: {
2471: GrammarAST tmp58_AST = null;
2472: tmp58_AST = (GrammarAST) astFactory.create(LT(1));
2473: match(PLUS);
2474: ebnfRoot = (GrammarAST) astFactory.create(
2475: POSITIVE_CLOSURE, "+");
2476: break;
2477: }
2478: default: {
2479: throw new NoViableAltException(LT(1), getFilename());
2480: }
2481: }
2482: }
2483:
2484: GrammarAST save = currentBlockAST;
2485: ebnfRoot.setLine(elemAST.getLine());
2486: ebnfRoot.setColumn(elemAST.getColumn());
2487: GrammarAST blkRoot = (GrammarAST) astFactory.create(BLOCK,
2488: "BLOCK");
2489: currentBlockAST = blkRoot;
2490: GrammarAST eob = (GrammarAST) astFactory.create(EOB,
2491: "<end-of-block>");
2492: eob.setLine(elemAST.getLine());
2493: eob.setColumn(elemAST.getColumn());
2494: GrammarAST alt = (GrammarAST) astFactory
2495: .make((new ASTArray(3)).add(
2496: (GrammarAST) astFactory.create(ALT, "ALT"))
2497: .add(elemAST).add(
2498: (GrammarAST) astFactory.create(EOA,
2499: "<end-of-alt>")));
2500: if (!inRewrite) {
2501: prefixWithSynPred(alt);
2502: }
2503: subrule = (GrammarAST) astFactory
2504: .make((new ASTArray(2)).add(ebnfRoot)
2505: .add(
2506: (GrammarAST) astFactory
2507: .make((new ASTArray(3))
2508: .add(blkRoot).add(
2509: alt).add(
2510: eob))));
2511: currentBlockAST = save;
2512:
2513: } catch (RecognitionException ex) {
2514: reportError(ex);
2515: recover(ex, _tokenSet_28);
2516: }
2517: returnAST = ebnfSuffix_AST;
2518: return subrule;
2519: }
2520:
2521: /** matches EBNF blocks (and sets via block rule) */
2522: public final void ebnf() throws RecognitionException,
2523: TokenStreamException {
2524:
2525: returnAST = null;
2526: ASTPair currentAST = new ASTPair();
2527: GrammarAST ebnf_AST = null;
2528: GrammarAST b_AST = null;
2529:
2530: int line = LT(1).getLine();
2531: int col = LT(1).getColumn();
2532:
2533: try { // for error handling
2534: block();
2535: b_AST = (GrammarAST) returnAST;
2536: {
2537: switch (LA(1)) {
2538: case QUESTION: {
2539: GrammarAST tmp59_AST = null;
2540: tmp59_AST = (GrammarAST) astFactory.create(LT(1));
2541: match(QUESTION);
2542: ebnf_AST = (GrammarAST) currentAST.root;
2543: ebnf_AST = (GrammarAST) astFactory
2544: .make((new ASTArray(2)).add(
2545: (GrammarAST) astFactory.create(
2546: OPTIONAL, "?")).add(b_AST));
2547: currentAST.root = ebnf_AST;
2548: currentAST.child = ebnf_AST != null
2549: && ebnf_AST.getFirstChild() != null ? ebnf_AST
2550: .getFirstChild()
2551: : ebnf_AST;
2552: currentAST.advanceChildToEnd();
2553: break;
2554: }
2555: case STAR: {
2556: GrammarAST tmp60_AST = null;
2557: tmp60_AST = (GrammarAST) astFactory.create(LT(1));
2558: match(STAR);
2559: ebnf_AST = (GrammarAST) currentAST.root;
2560: ebnf_AST = (GrammarAST) astFactory
2561: .make((new ASTArray(2)).add(
2562: (GrammarAST) astFactory.create(
2563: CLOSURE, "*")).add(b_AST));
2564: currentAST.root = ebnf_AST;
2565: currentAST.child = ebnf_AST != null
2566: && ebnf_AST.getFirstChild() != null ? ebnf_AST
2567: .getFirstChild()
2568: : ebnf_AST;
2569: currentAST.advanceChildToEnd();
2570: break;
2571: }
2572: case PLUS: {
2573: GrammarAST tmp61_AST = null;
2574: tmp61_AST = (GrammarAST) astFactory.create(LT(1));
2575: match(PLUS);
2576: ebnf_AST = (GrammarAST) currentAST.root;
2577: ebnf_AST = (GrammarAST) astFactory
2578: .make((new ASTArray(2)).add(
2579: (GrammarAST) astFactory.create(
2580: POSITIVE_CLOSURE, "+"))
2581: .add(b_AST));
2582: currentAST.root = ebnf_AST;
2583: currentAST.child = ebnf_AST != null
2584: && ebnf_AST.getFirstChild() != null ? ebnf_AST
2585: .getFirstChild()
2586: : ebnf_AST;
2587: currentAST.advanceChildToEnd();
2588: break;
2589: }
2590: case IMPLIES: {
2591: match(IMPLIES);
2592: ebnf_AST = (GrammarAST) currentAST.root;
2593:
2594: if (gtype == COMBINED_GRAMMAR
2595: && Character.isUpperCase(currentRuleName
2596: .charAt(0))) {
2597: // ignore for lexer rules in a combined grammar
2598: ebnf_AST = (GrammarAST) astFactory
2599: .make((new ASTArray(2)).add(
2600: (GrammarAST) astFactory.create(
2601: SYNPRED, "=>")).add(
2602: b_AST));
2603: } else {
2604: // create manually specified (...)=> predicate;
2605: // convert to sempred
2606: ebnf_AST = createSynSemPredFromBlock(b_AST,
2607: SYN_SEMPRED);
2608: }
2609:
2610: currentAST.root = ebnf_AST;
2611: currentAST.child = ebnf_AST != null
2612: && ebnf_AST.getFirstChild() != null ? ebnf_AST
2613: .getFirstChild()
2614: : ebnf_AST;
2615: currentAST.advanceChildToEnd();
2616: break;
2617: }
2618: case ROOT: {
2619: GrammarAST tmp63_AST = null;
2620: tmp63_AST = (GrammarAST) astFactory.create(LT(1));
2621: match(ROOT);
2622: ebnf_AST = (GrammarAST) currentAST.root;
2623: ebnf_AST = (GrammarAST) astFactory
2624: .make((new ASTArray(2)).add(tmp63_AST).add(
2625: b_AST));
2626: currentAST.root = ebnf_AST;
2627: currentAST.child = ebnf_AST != null
2628: && ebnf_AST.getFirstChild() != null ? ebnf_AST
2629: .getFirstChild()
2630: : ebnf_AST;
2631: currentAST.advanceChildToEnd();
2632: break;
2633: }
2634: case BANG: {
2635: GrammarAST tmp64_AST = null;
2636: tmp64_AST = (GrammarAST) astFactory.create(LT(1));
2637: match(BANG);
2638: ebnf_AST = (GrammarAST) currentAST.root;
2639: ebnf_AST = (GrammarAST) astFactory
2640: .make((new ASTArray(2)).add(tmp64_AST).add(
2641: b_AST));
2642: currentAST.root = ebnf_AST;
2643: currentAST.child = ebnf_AST != null
2644: && ebnf_AST.getFirstChild() != null ? ebnf_AST
2645: .getFirstChild()
2646: : ebnf_AST;
2647: currentAST.advanceChildToEnd();
2648: break;
2649: }
2650: case ACTION:
2651: case SEMI:
2652: case STRING_LITERAL:
2653: case CHAR_LITERAL:
2654: case TOKEN_REF:
2655: case LPAREN:
2656: case OR:
2657: case RPAREN:
2658: case SEMPRED:
2659: case RULE_REF:
2660: case NOT:
2661: case TREE_BEGIN:
2662: case WILDCARD:
2663: case REWRITE: {
2664: ebnf_AST = (GrammarAST) currentAST.root;
2665: ebnf_AST = b_AST;
2666: currentAST.root = ebnf_AST;
2667: currentAST.child = ebnf_AST != null
2668: && ebnf_AST.getFirstChild() != null ? ebnf_AST
2669: .getFirstChild()
2670: : ebnf_AST;
2671: currentAST.advanceChildToEnd();
2672: break;
2673: }
2674: default: {
2675: throw new NoViableAltException(LT(1), getFilename());
2676: }
2677: }
2678: }
2679: ebnf_AST = (GrammarAST) currentAST.root;
2680: ebnf_AST.setLine(line);
2681: ebnf_AST.setColumn(col);
2682: } catch (RecognitionException ex) {
2683: reportError(ex);
2684: recover(ex, _tokenSet_21);
2685: }
2686: returnAST = ebnf_AST;
2687: }
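// Inputs handled by ebnf() above include "(a|b)?", "(a|b)*", "(a|b)+",
// a syntactic predicate "(a|b)=>", and a block followed by a ROOT ('^')
// or BANG ('!') operator; a bare "(a|b)" yields the block subtree itself.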
2688:
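/** Match a tree specification: TREE_BEGIN ('^(') followed by a root
 *  element, one or more child elements, and ')'.
 */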
2689: public final void tree() throws RecognitionException,
2690: TokenStreamException {
2691:
2692: returnAST = null;
2693: ASTPair currentAST = new ASTPair();
2694: GrammarAST tree_AST = null;
2695:
2696: try { // for error handling
2697: GrammarAST tmp65_AST = null;
2698: tmp65_AST = (GrammarAST) astFactory.create(LT(1));
2699: astFactory.makeASTRoot(currentAST, tmp65_AST);
2700: match(TREE_BEGIN);
2701: element();
2702: astFactory.addASTChild(currentAST, returnAST);
2703: {
2704: int _cnt88 = 0;
2705: _loop88: do {
2706: if ((_tokenSet_18.member(LA(1)))) {
2707: element();
2708: astFactory.addASTChild(currentAST, returnAST);
2709: } else {
2710: if (_cnt88 >= 1) {
2711: break _loop88;
2712: } else {
2713: throw new NoViableAltException(LT(1),
2714: getFilename());
2715: }
2716: }
2717:
2718: _cnt88++;
2719: } while (true);
2720: }
2721: match(RPAREN);
2722: tree_AST = (GrammarAST) currentAST.root;
2723: } catch (RecognitionException ex) {
2724: reportError(ex);
2725: recover(ex, _tokenSet_21);
2726: }
2727: returnAST = tree_AST;
2728: }
2729:
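/** Match a character range such as 'a'..'z' and rewrite it as
 *  ^(CHAR_RANGE c1 c2), positioned at the first literal.
 */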
2730: public final void range() throws RecognitionException,
2731: TokenStreamException {
2732:
2733: returnAST = null;
2734: ASTPair currentAST = new ASTPair();
2735: GrammarAST range_AST = null;
2736: Token c1 = null;
2737: GrammarAST c1_AST = null;
2738: Token c2 = null;
2739: GrammarAST c2_AST = null;
2740:
2741: GrammarAST subrule = null, root = null;
2742:
2743: try { // for error handling
2744: c1 = LT(1);
2745: c1_AST = (GrammarAST) astFactory.create(c1);
2746: match(CHAR_LITERAL);
2747: GrammarAST tmp67_AST = null;
2748: tmp67_AST = (GrammarAST) astFactory.create(LT(1));
2749: match(RANGE);
2750: c2 = LT(1);
2751: c2_AST = (GrammarAST) astFactory.create(c2);
2752: match(CHAR_LITERAL);
2753: range_AST = (GrammarAST) currentAST.root;
2754:
2755: GrammarAST r = (GrammarAST) astFactory.create(CHAR_RANGE,
2756: "..");
2757: r.setLine(c1.getLine());
2758: r.setColumn(c1.getColumn());
2759: range_AST = (GrammarAST) astFactory.make((new ASTArray(3))
2760: .add(r).add(c1_AST).add(c2_AST));
2761: root = range_AST;
2762:
2763: currentAST.root = range_AST;
2764: currentAST.child = range_AST != null
2765: && range_AST.getFirstChild() != null ? range_AST
2766: .getFirstChild() : range_AST;
2767: currentAST.advanceChildToEnd();
2768: } catch (RecognitionException ex) {
2769: reportError(ex);
2770: recover(ex, _tokenSet_29);
2771: }
2772: returnAST = range_AST;
2773: }
2774:
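/** Match a terminal: a char literal, a token reference (with an optional
 *  argument action), a string literal, or the wildcard, each optionally
 *  followed by a ROOT ('^') or BANG ('!') operator.
 */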
2775: public final void terminal() throws RecognitionException,
2776: TokenStreamException {
2777:
2778: returnAST = null;
2779: ASTPair currentAST = new ASTPair();
2780: GrammarAST terminal_AST = null;
2781: Token cl = null;
2782: GrammarAST cl_AST = null;
2783: Token tr = null;
2784: GrammarAST tr_AST = null;
2785: Token sl = null;
2786: GrammarAST sl_AST = null;
2787: Token wi = null;
2788: GrammarAST wi_AST = null;
2789:
2790: GrammarAST ebnfRoot = null, subrule = null;
2791:
2792: try { // for error handling
2793: switch (LA(1)) {
2794: case CHAR_LITERAL: {
2795: cl = LT(1);
2796: cl_AST = (GrammarAST) astFactory.create(cl);
2797: astFactory.makeASTRoot(currentAST, cl_AST);
2798: match(CHAR_LITERAL);
2799: {
2800: switch (LA(1)) {
2801: case ROOT: {
2802: GrammarAST tmp68_AST = null;
2803: tmp68_AST = (GrammarAST) astFactory
2804: .create(LT(1));
2805: astFactory.makeASTRoot(currentAST, tmp68_AST);
2806: match(ROOT);
2807: break;
2808: }
2809: case BANG: {
2810: GrammarAST tmp69_AST = null;
2811: tmp69_AST = (GrammarAST) astFactory
2812: .create(LT(1));
2813: astFactory.makeASTRoot(currentAST, tmp69_AST);
2814: match(BANG);
2815: break;
2816: }
2817: case ACTION:
2818: case SEMI:
2819: case STRING_LITERAL:
2820: case CHAR_LITERAL:
2821: case STAR:
2822: case TOKEN_REF:
2823: case LPAREN:
2824: case OR:
2825: case RPAREN:
2826: case SEMPRED:
2827: case RULE_REF:
2828: case NOT:
2829: case TREE_BEGIN:
2830: case QUESTION:
2831: case PLUS:
2832: case WILDCARD:
2833: case REWRITE: {
2834: break;
2835: }
2836: default: {
2837: throw new NoViableAltException(LT(1),
2838: getFilename());
2839: }
2840: }
2841: }
2842: terminal_AST = (GrammarAST) currentAST.root;
2843: break;
2844: }
2845: case TOKEN_REF: {
2846: tr = LT(1);
2847: tr_AST = (GrammarAST) astFactory.create(tr);
2848: astFactory.makeASTRoot(currentAST, tr_AST);
2849: match(TOKEN_REF);
2850: {
2851: switch (LA(1)) {
2852: case ARG_ACTION: {
2853: GrammarAST tmp70_AST = null;
2854: tmp70_AST = (GrammarAST) astFactory
2855: .create(LT(1));
2856: astFactory.addASTChild(currentAST, tmp70_AST);
2857: match(ARG_ACTION);
2858: break;
2859: }
2860: case ACTION:
2861: case SEMI:
2862: case STRING_LITERAL:
2863: case CHAR_LITERAL:
2864: case STAR:
2865: case TOKEN_REF:
2866: case BANG:
2867: case LPAREN:
2868: case OR:
2869: case RPAREN:
2870: case SEMPRED:
2871: case ROOT:
2872: case RULE_REF:
2873: case NOT:
2874: case TREE_BEGIN:
2875: case QUESTION:
2876: case PLUS:
2877: case WILDCARD:
2878: case REWRITE: {
2879: break;
2880: }
2881: default: {
2882: throw new NoViableAltException(LT(1),
2883: getFilename());
2884: }
2885: }
2886: }
2887: {
2888: switch (LA(1)) {
2889: case ROOT: {
2890: GrammarAST tmp71_AST = null;
2891: tmp71_AST = (GrammarAST) astFactory
2892: .create(LT(1));
2893: astFactory.makeASTRoot(currentAST, tmp71_AST);
2894: match(ROOT);
2895: break;
2896: }
2897: case BANG: {
2898: GrammarAST tmp72_AST = null;
2899: tmp72_AST = (GrammarAST) astFactory
2900: .create(LT(1));
2901: astFactory.makeASTRoot(currentAST, tmp72_AST);
2902: match(BANG);
2903: break;
2904: }
2905: case ACTION:
2906: case SEMI:
2907: case STRING_LITERAL:
2908: case CHAR_LITERAL:
2909: case STAR:
2910: case TOKEN_REF:
2911: case LPAREN:
2912: case OR:
2913: case RPAREN:
2914: case SEMPRED:
2915: case RULE_REF:
2916: case NOT:
2917: case TREE_BEGIN:
2918: case QUESTION:
2919: case PLUS:
2920: case WILDCARD:
2921: case REWRITE: {
2922: break;
2923: }
2924: default: {
2925: throw new NoViableAltException(LT(1),
2926: getFilename());
2927: }
2928: }
2929: }
2930: terminal_AST = (GrammarAST) currentAST.root;
2931: break;
2932: }
2933: case STRING_LITERAL: {
2934: sl = LT(1);
2935: sl_AST = (GrammarAST) astFactory.create(sl);
2936: astFactory.addASTChild(currentAST, sl_AST);
2937: match(STRING_LITERAL);
2938: {
2939: switch (LA(1)) {
2940: case ROOT: {
2941: GrammarAST tmp73_AST = null;
2942: tmp73_AST = (GrammarAST) astFactory
2943: .create(LT(1));
2944: astFactory.makeASTRoot(currentAST, tmp73_AST);
2945: match(ROOT);
2946: break;
2947: }
2948: case BANG: {
2949: GrammarAST tmp74_AST = null;
2950: tmp74_AST = (GrammarAST) astFactory
2951: .create(LT(1));
2952: astFactory.makeASTRoot(currentAST, tmp74_AST);
2953: match(BANG);
2954: break;
2955: }
2956: case ACTION:
2957: case SEMI:
2958: case STRING_LITERAL:
2959: case CHAR_LITERAL:
2960: case STAR:
2961: case TOKEN_REF:
2962: case LPAREN:
2963: case OR:
2964: case RPAREN:
2965: case SEMPRED:
2966: case RULE_REF:
2967: case NOT:
2968: case TREE_BEGIN:
2969: case QUESTION:
2970: case PLUS:
2971: case WILDCARD:
2972: case REWRITE: {
2973: break;
2974: }
2975: default: {
2976: throw new NoViableAltException(LT(1),
2977: getFilename());
2978: }
2979: }
2980: }
2981: terminal_AST = (GrammarAST) currentAST.root;
2982: break;
2983: }
2984: case WILDCARD: {
2985: wi = LT(1);
2986: wi_AST = (GrammarAST) astFactory.create(wi);
2987: astFactory.addASTChild(currentAST, wi_AST);
2988: match(WILDCARD);
2989: {
2990: switch (LA(1)) {
2991: case ROOT: {
2992: GrammarAST tmp75_AST = null;
2993: tmp75_AST = (GrammarAST) astFactory
2994: .create(LT(1));
2995: astFactory.makeASTRoot(currentAST, tmp75_AST);
2996: match(ROOT);
2997: break;
2998: }
2999: case BANG: {
3000: GrammarAST tmp76_AST = null;
3001: tmp76_AST = (GrammarAST) astFactory
3002: .create(LT(1));
3003: astFactory.makeASTRoot(currentAST, tmp76_AST);
3004: match(BANG);
3005: break;
3006: }
3007: case ACTION:
3008: case SEMI:
3009: case STRING_LITERAL:
3010: case CHAR_LITERAL:
3011: case STAR:
3012: case TOKEN_REF:
3013: case LPAREN:
3014: case OR:
3015: case RPAREN:
3016: case SEMPRED:
3017: case RULE_REF:
3018: case NOT:
3019: case TREE_BEGIN:
3020: case QUESTION:
3021: case PLUS:
3022: case WILDCARD:
3023: case REWRITE: {
3024: break;
3025: }
3026: default: {
3027: throw new NoViableAltException(LT(1),
3028: getFilename());
3029: }
3030: }
3031: }
3032: terminal_AST = (GrammarAST) currentAST.root;
3033: break;
3034: }
3035: default: {
3036: throw new NoViableAltException(LT(1), getFilename());
3037: }
3038: }
3039: } catch (RecognitionException ex) {
3040: reportError(ex);
3041: recover(ex, _tokenSet_27);
3042: }
3043: returnAST = terminal_AST;
3044: }
3045:
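/** Match a set complement: NOT ('~') applied to a single terminal or to
 *  a block; the resulting root keeps the line/column of the '~' token.
 */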
3046: public final void notSet() throws RecognitionException,
3047: TokenStreamException {
3048:
3049: returnAST = null;
3050: ASTPair currentAST = new ASTPair();
3051: GrammarAST notSet_AST = null;
3052: Token n = null;
3053: GrammarAST n_AST = null;
3054:
3055: int line = LT(1).getLine();
3056: int col = LT(1).getColumn();
3057: GrammarAST subrule = null;
3058:
3059: try { // for error handling
3060: n = LT(1);
3061: n_AST = (GrammarAST) astFactory.create(n);
3062: astFactory.makeASTRoot(currentAST, n_AST);
3063: match(NOT);
3064: {
3065: switch (LA(1)) {
3066: case STRING_LITERAL:
3067: case CHAR_LITERAL:
3068: case TOKEN_REF: {
3069: notTerminal();
3070: astFactory.addASTChild(currentAST, returnAST);
3071: break;
3072: }
3073: case LPAREN: {
3074: block();
3075: astFactory.addASTChild(currentAST, returnAST);
3076: break;
3077: }
3078: default: {
3079: throw new NoViableAltException(LT(1), getFilename());
3080: }
3081: }
3082: }
3083: notSet_AST = (GrammarAST) currentAST.root;
3084: notSet_AST.setLine(line);
3085: notSet_AST.setColumn(col);
3086: notSet_AST = (GrammarAST) currentAST.root;
3087: } catch (RecognitionException ex) {
3088: reportError(ex);
3089: recover(ex, _tokenSet_29);
3090: }
3091: returnAST = notSet_AST;
3092: }
3093:
3094: public final void notTerminal() throws RecognitionException,
3095: TokenStreamException {
3096:
3097: returnAST = null;
3098: ASTPair currentAST = new ASTPair();
3099: GrammarAST notTerminal_AST = null;
3100: Token cl = null;
3101: GrammarAST cl_AST = null;
3102: Token tr = null;
3103: GrammarAST tr_AST = null;
3104:
3105: try { // for error handling
3106: switch (LA(1)) {
3107: case CHAR_LITERAL: {
3108: cl = LT(1);
3109: cl_AST = (GrammarAST) astFactory.create(cl);
3110: astFactory.addASTChild(currentAST, cl_AST);
3111: match(CHAR_LITERAL);
3112: notTerminal_AST = (GrammarAST) currentAST.root;
3113: break;
3114: }
3115: case TOKEN_REF: {
3116: tr = LT(1);
3117: tr_AST = (GrammarAST) astFactory.create(tr);
3118: astFactory.addASTChild(currentAST, tr_AST);
3119: match(TOKEN_REF);
3120: notTerminal_AST = (GrammarAST) currentAST.root;
3121: break;
3122: }
3123: case STRING_LITERAL: {
3124: GrammarAST tmp77_AST = null;
3125: tmp77_AST = (GrammarAST) astFactory.create(LT(1));
3126: astFactory.addASTChild(currentAST, tmp77_AST);
3127: match(STRING_LITERAL);
3128: notTerminal_AST = (GrammarAST) currentAST.root;
3129: break;
3130: }
3131: default: {
3132: throw new NoViableAltException(LT(1), getFilename());
3133: }
3134: }
3135: } catch (RecognitionException ex) {
3136: reportError(ex);
3137: recover(ex, _tokenSet_29);
3138: }
3139: returnAST = notTerminal_AST;
3140: }
3141:
3142: /** Match anything that looks like an ID and return its tree node with the token type set to ID */
3143: public final void idToken() throws RecognitionException,
3144: TokenStreamException {
3145:
3146: returnAST = null;
3147: ASTPair currentAST = new ASTPair();
3148: GrammarAST idToken_AST = null;
3149:
3150: try { // for error handling
3151: switch (LA(1)) {
3152: case TOKEN_REF: {
3153: GrammarAST tmp78_AST = null;
3154: tmp78_AST = (GrammarAST) astFactory.create(LT(1));
3155: astFactory.addASTChild(currentAST, tmp78_AST);
3156: match(TOKEN_REF);
3157: idToken_AST = (GrammarAST) currentAST.root;
3158: idToken_AST.setType(ID);
3159: idToken_AST = (GrammarAST) currentAST.root;
3160: break;
3161: }
3162: case RULE_REF: {
3163: GrammarAST tmp79_AST = null;
3164: tmp79_AST = (GrammarAST) astFactory.create(LT(1));
3165: astFactory.addASTChild(currentAST, tmp79_AST);
3166: match(RULE_REF);
3167: idToken_AST = (GrammarAST) currentAST.root;
3168: idToken_AST.setType(ID);
3169: idToken_AST = (GrammarAST) currentAST.root;
3170: break;
3171: }
3172: default: {
3173: throw new NoViableAltException(LT(1), getFilename());
3174: }
3175: }
3176: } catch (RecognitionException ex) {
3177: reportError(ex);
3178: recover(ex, _tokenSet_0);
3179: }
3180: returnAST = idToken_AST;
3181: }
3182:
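/** Match one alternative of a rewrite: a template rewrite when the
 *  grammar builds templates, a sequence of rewrite elements when it
 *  builds ASTs, or nothing at all; the AST result is wrapped as
 *  ^(ALT ... <end-of-alt>), with EPSILON standing in for an empty
 *  alternative.
 */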
3183: public final void rewrite_alternative()
3184: throws RecognitionException, TokenStreamException {
3185:
3186: returnAST = null;
3187: ASTPair currentAST = new ASTPair();
3188: GrammarAST rewrite_alternative_AST = null;
3189:
3190: GrammarAST eoa = (GrammarAST) astFactory.create(EOA,
3191: "<end-of-alt>");
3192: GrammarAST altRoot = (GrammarAST) astFactory.create(ALT, "ALT");
3193: altRoot.setLine(LT(1).getLine());
3194: altRoot.setColumn(LT(1).getColumn());
3195:
3196: try { // for error handling
3197: if (((_tokenSet_30.member(LA(1))) && (_tokenSet_31
3198: .member(LA(2))))
3199: && (grammar.buildTemplate())) {
3200: rewrite_template();
3201: astFactory.addASTChild(currentAST, returnAST);
3202: rewrite_alternative_AST = (GrammarAST) currentAST.root;
3203: } else if (((_tokenSet_32.member(LA(1))) && (_tokenSet_33
3204: .member(LA(2))))
3205: && (grammar.buildAST())) {
3206: {
3207: int _cnt112 = 0;
3208: _loop112: do {
3209: if ((_tokenSet_32.member(LA(1)))) {
3210: rewrite_element();
3211: astFactory.addASTChild(currentAST,
3212: returnAST);
3213: } else {
3214: if (_cnt112 >= 1) {
3215: break _loop112;
3216: } else {
3217: throw new NoViableAltException(LT(1),
3218: getFilename());
3219: }
3220: }
3221:
3222: _cnt112++;
3223: } while (true);
3224: }
3225: rewrite_alternative_AST = (GrammarAST) currentAST.root;
3226:
3227: if (rewrite_alternative_AST == null) {
3228: rewrite_alternative_AST = (GrammarAST) astFactory
3229: .make((new ASTArray(3)).add(altRoot).add(
3230: (GrammarAST) astFactory.create(
3231: EPSILON, "epsilon")).add(
3232: eoa));
3233: } else {
3234: rewrite_alternative_AST = (GrammarAST) astFactory
3235: .make((new ASTArray(3)).add(altRoot).add(
3236: rewrite_alternative_AST).add(eoa));
3237: }
3238:
3239: currentAST.root = rewrite_alternative_AST;
3240: currentAST.child = rewrite_alternative_AST != null
3241: && rewrite_alternative_AST.getFirstChild() != null ? rewrite_alternative_AST
3242: .getFirstChild()
3243: : rewrite_alternative_AST;
3244: currentAST.advanceChildToEnd();
3245: rewrite_alternative_AST = (GrammarAST) currentAST.root;
3246: } else if ((_tokenSet_19.member(LA(1)))) {
3247: rewrite_alternative_AST = (GrammarAST) currentAST.root;
3248: rewrite_alternative_AST = (GrammarAST) astFactory
3249: .make((new ASTArray(3)).add(altRoot).add(
3250: (GrammarAST) astFactory.create(EPSILON,
3251: "epsilon")).add(eoa));
3252: currentAST.root = rewrite_alternative_AST;
3253: currentAST.child = rewrite_alternative_AST != null
3254: && rewrite_alternative_AST.getFirstChild() != null ? rewrite_alternative_AST
3255: .getFirstChild()
3256: : rewrite_alternative_AST;
3257: currentAST.advanceChildToEnd();
3258: rewrite_alternative_AST = (GrammarAST) currentAST.root;
3259: } else {
3260: throw new NoViableAltException(LT(1), getFilename());
3261: }
3262:
3263: } catch (RecognitionException ex) {
3264: reportError(ex);
3265: recover(ex, _tokenSet_19);
3266: }
3267: returnAST = rewrite_alternative_AST;
3268: }
3269:
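/** Match a parenthesized rewrite block; the '(' node is retyped to
 *  BLOCK and an <end-of-block> node is appended after the alternative.
 */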
3270: public final void rewrite_block() throws RecognitionException,
3271: TokenStreamException {
3272:
3273: returnAST = null;
3274: ASTPair currentAST = new ASTPair();
3275: GrammarAST rewrite_block_AST = null;
3276: Token lp = null;
3277: GrammarAST lp_AST = null;
3278:
3279: try { // for error handling
3280: lp = LT(1);
3281: lp_AST = (GrammarAST) astFactory.create(lp);
3282: astFactory.makeASTRoot(currentAST, lp_AST);
3283: match(LPAREN);
3284: lp_AST.setType(BLOCK);
3285: lp_AST.setText("BLOCK");
3286: rewrite_alternative();
3287: astFactory.addASTChild(currentAST, returnAST);
3288: match(RPAREN);
3289: rewrite_block_AST = (GrammarAST) currentAST.root;
3290:
3291: GrammarAST eob = (GrammarAST) astFactory.create(EOB,
3292: "<end-of-block>");
3293: eob.setLine(lp.getLine());
3294: eob.setColumn(lp.getColumn());
3295: rewrite_block_AST.addChild(eob);
3296:
3297: rewrite_block_AST = (GrammarAST) currentAST.root;
3298: } catch (RecognitionException ex) {
3299: reportError(ex);
3300: recover(ex, _tokenSet_34);
3301: }
3302: returnAST = rewrite_block_AST;
3303: }
3304:
3305: /** Build a tree for a template rewrite:
3306: ^(TEMPLATE (ID|ACTION) ^(ARGLIST ^(ARG ID ACTION) ...) )
3307: where ARGLIST is always there even if no args exist.
3308:  ID can be the "template" keyword. If the first child is ACTION, then it is
3309:  an indirect template ref.
3310:
3311: -> foo(a={...}, b={...})
3312: -> ({string-e})(a={...}, b={...}) // e evaluates to template name
3313: -> {%{$ID.text}} // create literal template from string (done in ActionTranslator)
3314: -> {st-expr} // st-expr evaluates to ST
3315: */
3316: public final void rewrite_template() throws RecognitionException,
3317: TokenStreamException {
3318:
3319: returnAST = null;
3320: ASTPair currentAST = new ASTPair();
3321: GrammarAST rewrite_template_AST = null;
3322: Token st = null;
3323:
3324: try { // for error handling
3325: switch (LA(1)) {
3326: case LPAREN: {
3327: rewrite_indirect_template_head();
3328: astFactory.addASTChild(currentAST, returnAST);
3329: rewrite_template_AST = (GrammarAST) currentAST.root;
3330: break;
3331: }
3332: case ACTION: {
3333: GrammarAST tmp81_AST = null;
3334: tmp81_AST = (GrammarAST) astFactory.create(LT(1));
3335: astFactory.addASTChild(currentAST, tmp81_AST);
3336: match(ACTION);
3337: rewrite_template_AST = (GrammarAST) currentAST.root;
3338: break;
3339: }
3340: default:
3341: if (((LA(1) == TOKEN_REF || LA(1) == RULE_REF) && (LA(2) == LPAREN))
3342: && (LT(1).getText().equals("template"))) {
3343: rewrite_template_head();
3344: astFactory.addASTChild(currentAST, returnAST);
3345: st = LT(1);
3346: {
3347: switch (LA(1)) {
3348: case DOUBLE_QUOTE_STRING_LITERAL: {
3349: match(DOUBLE_QUOTE_STRING_LITERAL);
3350: break;
3351: }
3352: case DOUBLE_ANGLE_STRING_LITERAL: {
3353: match(DOUBLE_ANGLE_STRING_LITERAL);
3354: break;
3355: }
3356: default: {
3357: throw new NoViableAltException(LT(1),
3358: getFilename());
3359: }
3360: }
3361: }
3362: rewrite_template_AST = (GrammarAST) currentAST.root;
3363: rewrite_template_AST
3364: .addChild((GrammarAST) astFactory
3365: .create(st));
3366: rewrite_template_AST = (GrammarAST) currentAST.root;
3367: } else if ((LA(1) == TOKEN_REF || LA(1) == RULE_REF)
3368: && (LA(2) == LPAREN)) {
3369: rewrite_template_head();
3370: astFactory.addASTChild(currentAST, returnAST);
3371: rewrite_template_AST = (GrammarAST) currentAST.root;
3372: } else {
3373: throw new NoViableAltException(LT(1), getFilename());
3374: }
3375: }
3376: } catch (RecognitionException ex) {
3377: reportError(ex);
3378: recover(ex, _tokenSet_19);
3379: }
3380: returnAST = rewrite_template_AST;
3381: }
3382:
3383: public final void rewrite_element() throws RecognitionException,
3384: TokenStreamException {
3385:
3386: returnAST = null;
3387: ASTPair currentAST = new ASTPair();
3388: GrammarAST rewrite_element_AST = null;
3389: GrammarAST t_AST = null;
3390: GrammarAST tr_AST = null;
3391:
3392: GrammarAST subrule = null;
3393:
3394: try { // for error handling
3395: switch (LA(1)) {
3396: case ACTION:
3397: case STRING_LITERAL:
3398: case CHAR_LITERAL:
3399: case TOKEN_REF:
3400: case RULE_REF:
3401: case DOLLAR: {
3402: rewrite_atom();
3403: t_AST = (GrammarAST) returnAST;
3404: astFactory.addASTChild(currentAST, returnAST);
3405: {
3406: switch (LA(1)) {
3407: case STAR:
3408: case QUESTION:
3409: case PLUS: {
3410: subrule = ebnfSuffix(t_AST, true);
3411: astFactory.addASTChild(currentAST, returnAST);
3412: rewrite_element_AST = (GrammarAST) currentAST.root;
3413: rewrite_element_AST = subrule;
3414: currentAST.root = rewrite_element_AST;
3415: currentAST.child = rewrite_element_AST != null
3416: && rewrite_element_AST.getFirstChild() != null ? rewrite_element_AST
3417: .getFirstChild()
3418: : rewrite_element_AST;
3419: currentAST.advanceChildToEnd();
3420: break;
3421: }
3422: case ACTION:
3423: case SEMI:
3424: case STRING_LITERAL:
3425: case CHAR_LITERAL:
3426: case TOKEN_REF:
3427: case LPAREN:
3428: case OR:
3429: case RPAREN:
3430: case RULE_REF:
3431: case TREE_BEGIN:
3432: case REWRITE:
3433: case DOLLAR: {
3434: break;
3435: }
3436: default: {
3437: throw new NoViableAltException(LT(1),
3438: getFilename());
3439: }
3440: }
3441: }
3442: rewrite_element_AST = (GrammarAST) currentAST.root;
3443: break;
3444: }
3445: case LPAREN: {
3446: rewrite_ebnf();
3447: astFactory.addASTChild(currentAST, returnAST);
3448: rewrite_element_AST = (GrammarAST) currentAST.root;
3449: break;
3450: }
3451: case TREE_BEGIN: {
3452: rewrite_tree();
3453: tr_AST = (GrammarAST) returnAST;
3454: astFactory.addASTChild(currentAST, returnAST);
3455: {
3456: switch (LA(1)) {
3457: case STAR:
3458: case QUESTION:
3459: case PLUS: {
3460: subrule = ebnfSuffix(tr_AST, true);
3461: astFactory.addASTChild(currentAST, returnAST);
3462: rewrite_element_AST = (GrammarAST) currentAST.root;
3463: rewrite_element_AST = subrule;
3464: currentAST.root = rewrite_element_AST;
3465: currentAST.child = rewrite_element_AST != null
3466: && rewrite_element_AST.getFirstChild() != null ? rewrite_element_AST
3467: .getFirstChild()
3468: : rewrite_element_AST;
3469: currentAST.advanceChildToEnd();
3470: break;
3471: }
3472: case ACTION:
3473: case SEMI:
3474: case STRING_LITERAL:
3475: case CHAR_LITERAL:
3476: case TOKEN_REF:
3477: case LPAREN:
3478: case OR:
3479: case RPAREN:
3480: case RULE_REF:
3481: case TREE_BEGIN:
3482: case REWRITE:
3483: case DOLLAR: {
3484: break;
3485: }
3486: default: {
3487: throw new NoViableAltException(LT(1),
3488: getFilename());
3489: }
3490: }
3491: }
3492: rewrite_element_AST = (GrammarAST) currentAST.root;
3493: break;
3494: }
3495: default: {
3496: throw new NoViableAltException(LT(1), getFilename());
3497: }
3498: }
3499: } catch (RecognitionException ex) {
3500: reportError(ex);
3501: recover(ex, _tokenSet_35);
3502: }
3503: returnAST = rewrite_element_AST;
3504: }
3505:
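/** Match an atom on the right-hand side of a rewrite: a char or string
 *  literal, a token reference (with an optional argument action), a rule
 *  reference, a $label (converted to a LABEL node scoped to the current
 *  rule), or an action.
 */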
3506: public final void rewrite_atom() throws RecognitionException,
3507: TokenStreamException {
3508:
3509: returnAST = null;
3510: ASTPair currentAST = new ASTPair();
3511: GrammarAST rewrite_atom_AST = null;
3512: Token cl = null;
3513: GrammarAST cl_AST = null;
3514: Token tr = null;
3515: GrammarAST tr_AST = null;
3516: Token rr = null;
3517: GrammarAST rr_AST = null;
3518: Token sl = null;
3519: GrammarAST sl_AST = null;
3520: Token d = null;
3521: GrammarAST d_AST = null;
3522: GrammarAST i_AST = null;
3523:
3524: GrammarAST subrule = null;
3525:
3526: try { // for error handling
3527: switch (LA(1)) {
3528: case CHAR_LITERAL: {
3529: cl = LT(1);
3530: cl_AST = (GrammarAST) astFactory.create(cl);
3531: astFactory.addASTChild(currentAST, cl_AST);
3532: match(CHAR_LITERAL);
3533: rewrite_atom_AST = (GrammarAST) currentAST.root;
3534: break;
3535: }
3536: case TOKEN_REF: {
3537: tr = LT(1);
3538: tr_AST = (GrammarAST) astFactory.create(tr);
3539: astFactory.makeASTRoot(currentAST, tr_AST);
3540: match(TOKEN_REF);
3541: {
3542: switch (LA(1)) {
3543: case ARG_ACTION: {
3544: GrammarAST tmp84_AST = null;
3545: tmp84_AST = (GrammarAST) astFactory
3546: .create(LT(1));
3547: astFactory.addASTChild(currentAST, tmp84_AST);
3548: match(ARG_ACTION);
3549: break;
3550: }
3551: case ACTION:
3552: case SEMI:
3553: case STRING_LITERAL:
3554: case CHAR_LITERAL:
3555: case STAR:
3556: case TOKEN_REF:
3557: case LPAREN:
3558: case OR:
3559: case RPAREN:
3560: case RULE_REF:
3561: case TREE_BEGIN:
3562: case QUESTION:
3563: case PLUS:
3564: case REWRITE:
3565: case DOLLAR: {
3566: break;
3567: }
3568: default: {
3569: throw new NoViableAltException(LT(1),
3570: getFilename());
3571: }
3572: }
3573: }
3574: rewrite_atom_AST = (GrammarAST) currentAST.root;
3575: break;
3576: }
3577: case RULE_REF: {
3578: rr = LT(1);
3579: rr_AST = (GrammarAST) astFactory.create(rr);
3580: astFactory.addASTChild(currentAST, rr_AST);
3581: match(RULE_REF);
3582: rewrite_atom_AST = (GrammarAST) currentAST.root;
3583: break;
3584: }
3585: case STRING_LITERAL: {
3586: sl = LT(1);
3587: sl_AST = (GrammarAST) astFactory.create(sl);
3588: astFactory.addASTChild(currentAST, sl_AST);
3589: match(STRING_LITERAL);
3590: rewrite_atom_AST = (GrammarAST) currentAST.root;
3591: break;
3592: }
3593: case DOLLAR: {
3594: d = LT(1);
3595: d_AST = (GrammarAST) astFactory.create(d);
3596: match(DOLLAR);
3597: id();
3598: i_AST = (GrammarAST) returnAST;
3599: rewrite_atom_AST = (GrammarAST) currentAST.root;
3600:
3601: rewrite_atom_AST = (GrammarAST) astFactory.create(
3602: LABEL, i_AST.getText());
3603: rewrite_atom_AST.setLine(d_AST.getLine());
3604: rewrite_atom_AST.setColumn(d_AST.getColumn());
3605: rewrite_atom_AST.setEnclosingRule(currentRuleName);
3606:
3607: currentAST.root = rewrite_atom_AST;
3608: currentAST.child = rewrite_atom_AST != null
3609: && rewrite_atom_AST.getFirstChild() != null ? rewrite_atom_AST
3610: .getFirstChild()
3611: : rewrite_atom_AST;
3612: currentAST.advanceChildToEnd();
3613: break;
3614: }
3615: case ACTION: {
3616: GrammarAST tmp85_AST = null;
3617: tmp85_AST = (GrammarAST) astFactory.create(LT(1));
3618: astFactory.addASTChild(currentAST, tmp85_AST);
3619: match(ACTION);
3620: rewrite_atom_AST = (GrammarAST) currentAST.root;
3621: break;
3622: }
3623: default: {
3624: throw new NoViableAltException(LT(1), getFilename());
3625: }
3626: }
3627: } catch (RecognitionException ex) {
3628: reportError(ex);
3629: recover(ex, _tokenSet_36);
3630: }
3631: returnAST = rewrite_atom_AST;
3632: }
3633:
3634: public final void rewrite_ebnf() throws RecognitionException,
3635: TokenStreamException {
3636:
3637: returnAST = null;
3638: ASTPair currentAST = new ASTPair();
3639: GrammarAST rewrite_ebnf_AST = null;
3640: GrammarAST b_AST = null;
3641:
3642: int line = LT(1).getLine();
3643: int col = LT(1).getColumn();
3644:
3645: try { // for error handling
3646: rewrite_block();
3647: b_AST = (GrammarAST) returnAST;
3648: {
3649: switch (LA(1)) {
3650: case QUESTION: {
3651: GrammarAST tmp86_AST = null;
3652: tmp86_AST = (GrammarAST) astFactory.create(LT(1));
3653: match(QUESTION);
3654: rewrite_ebnf_AST = (GrammarAST) currentAST.root;
3655: rewrite_ebnf_AST = (GrammarAST) astFactory
3656: .make((new ASTArray(2)).add(
3657: (GrammarAST) astFactory.create(
3658: OPTIONAL, "?")).add(b_AST));
3659: currentAST.root = rewrite_ebnf_AST;
3660: currentAST.child = rewrite_ebnf_AST != null
3661: && rewrite_ebnf_AST.getFirstChild() != null ? rewrite_ebnf_AST
3662: .getFirstChild()
3663: : rewrite_ebnf_AST;
3664: currentAST.advanceChildToEnd();
3665: break;
3666: }
3667: case STAR: {
3668: GrammarAST tmp87_AST = null;
3669: tmp87_AST = (GrammarAST) astFactory.create(LT(1));
3670: match(STAR);
3671: rewrite_ebnf_AST = (GrammarAST) currentAST.root;
3672: rewrite_ebnf_AST = (GrammarAST) astFactory
3673: .make((new ASTArray(2)).add(
3674: (GrammarAST) astFactory.create(
3675: CLOSURE, "*")).add(b_AST));
3676: currentAST.root = rewrite_ebnf_AST;
3677: currentAST.child = rewrite_ebnf_AST != null
3678: && rewrite_ebnf_AST.getFirstChild() != null ? rewrite_ebnf_AST
3679: .getFirstChild()
3680: : rewrite_ebnf_AST;
3681: currentAST.advanceChildToEnd();
3682: break;
3683: }
3684: case PLUS: {
3685: GrammarAST tmp88_AST = null;
3686: tmp88_AST = (GrammarAST) astFactory.create(LT(1));
3687: match(PLUS);
3688: rewrite_ebnf_AST = (GrammarAST) currentAST.root;
3689: rewrite_ebnf_AST = (GrammarAST) astFactory
3690: .make((new ASTArray(2)).add(
3691: (GrammarAST) astFactory.create(
3692: POSITIVE_CLOSURE, "+"))
3693: .add(b_AST));
3694: currentAST.root = rewrite_ebnf_AST;
3695: currentAST.child = rewrite_ebnf_AST != null
3696: && rewrite_ebnf_AST.getFirstChild() != null ? rewrite_ebnf_AST
3697: .getFirstChild()
3698: : rewrite_ebnf_AST;
3699: currentAST.advanceChildToEnd();
3700: break;
3701: }
3702: default: {
3703: throw new NoViableAltException(LT(1), getFilename());
3704: }
3705: }
3706: }
3707: rewrite_ebnf_AST = (GrammarAST) currentAST.root;
3708: rewrite_ebnf_AST.setLine(line);
3709: rewrite_ebnf_AST.setColumn(col);
3710: } catch (RecognitionException ex) {
3711: reportError(ex);
3712: recover(ex, _tokenSet_35);
3713: }
3714: returnAST = rewrite_ebnf_AST;
3715: }
3716:
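/** Match a tree pattern in a rewrite: TREE_BEGIN ('^(') followed by a
 *  rewrite_atom root, zero or more rewrite_elements, and ')'.
 */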
3717: public final void rewrite_tree() throws RecognitionException,
3718: TokenStreamException {
3719:
3720: returnAST = null;
3721: ASTPair currentAST = new ASTPair();
3722: GrammarAST rewrite_tree_AST = null;
3723:
3724: try { // for error handling
3725: GrammarAST tmp89_AST = null;
3726: tmp89_AST = (GrammarAST) astFactory.create(LT(1));
3727: astFactory.makeASTRoot(currentAST, tmp89_AST);
3728: match(TREE_BEGIN);
3729: rewrite_atom();
3730: astFactory.addASTChild(currentAST, returnAST);
3731: {
3732: _loop122: do {
3733: if ((_tokenSet_32.member(LA(1)))) {
3734: rewrite_element();
3735: astFactory.addASTChild(currentAST, returnAST);
3736: } else {
3737: break _loop122;
3738: }
3739:
3740: } while (true);
3741: }
3742: match(RPAREN);
3743: rewrite_tree_AST = (GrammarAST) currentAST.root;
3744: } catch (RecognitionException ex) {
3745: reportError(ex);
3746: recover(ex, _tokenSet_36);
3747: }
3748: returnAST = rewrite_tree_AST;
3749: }
3750:
3751: /** -> foo(a={...}, ...) */
3752: public final void rewrite_template_head()
3753: throws RecognitionException, TokenStreamException {
3754:
3755: returnAST = null;
3756: ASTPair currentAST = new ASTPair();
3757: GrammarAST rewrite_template_head_AST = null;
3758: Token lp = null;
3759: GrammarAST lp_AST = null;
3760:
3761: try { // for error handling
3762: id();
3763: astFactory.addASTChild(currentAST, returnAST);
3764: lp = LT(1);
3765: lp_AST = (GrammarAST) astFactory.create(lp);
3766: astFactory.makeASTRoot(currentAST, lp_AST);
3767: match(LPAREN);
3768: lp_AST.setType(TEMPLATE);
3769: lp_AST.setText("TEMPLATE");
3770: rewrite_template_args();
3771: astFactory.addASTChild(currentAST, returnAST);
3772: match(RPAREN);
3773: rewrite_template_head_AST = (GrammarAST) currentAST.root;
3774: } catch (RecognitionException ex) {
3775: reportError(ex);
3776: recover(ex, _tokenSet_37);
3777: }
3778: returnAST = rewrite_template_head_AST;
3779: }
3780:
3781: /** -> ({expr})(a={...}, ...) */
3782: public final void rewrite_indirect_template_head()
3783: throws RecognitionException, TokenStreamException {
3784:
3785: returnAST = null;
3786: ASTPair currentAST = new ASTPair();
3787: GrammarAST rewrite_indirect_template_head_AST = null;
3788: Token lp = null;
3789: GrammarAST lp_AST = null;
3790:
3791: try { // for error handling
3792: lp = LT(1);
3793: lp_AST = (GrammarAST) astFactory.create(lp);
3794: astFactory.makeASTRoot(currentAST, lp_AST);
3795: match(LPAREN);
3796: lp_AST.setType(TEMPLATE);
3797: lp_AST.setText("TEMPLATE");
3798: GrammarAST tmp92_AST = null;
3799: tmp92_AST = (GrammarAST) astFactory.create(LT(1));
3800: astFactory.addASTChild(currentAST, tmp92_AST);
3801: match(ACTION);
3802: match(RPAREN);
3803: match(LPAREN);
3804: rewrite_template_args();
3805: astFactory.addASTChild(currentAST, returnAST);
3806: match(RPAREN);
3807: rewrite_indirect_template_head_AST = (GrammarAST) currentAST.root;
3808: } catch (RecognitionException ex) {
3809: reportError(ex);
3810: recover(ex, _tokenSet_19);
3811: }
3812: returnAST = rewrite_indirect_template_head_AST;
3813: }
3814:
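/** Match a comma-separated list of template arguments; the result is
 *  always wrapped under an ARGLIST node, even when the list is empty.
 */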
3815: public final void rewrite_template_args()
3816: throws RecognitionException, TokenStreamException {
3817:
3818: returnAST = null;
3819: ASTPair currentAST = new ASTPair();
3820: GrammarAST rewrite_template_args_AST = null;
3821:
3822: try { // for error handling
3823: switch (LA(1)) {
3824: case TOKEN_REF:
3825: case RULE_REF: {
3826: rewrite_template_arg();
3827: astFactory.addASTChild(currentAST, returnAST);
3828: {
3829: _loop129: do {
3830: if ((LA(1) == COMMA)) {
3831: match(COMMA);
3832: rewrite_template_arg();
3833: astFactory.addASTChild(currentAST,
3834: returnAST);
3835: } else {
3836: break _loop129;
3837: }
3838:
3839: } while (true);
3840: }
3841: rewrite_template_args_AST = (GrammarAST) currentAST.root;
3842: rewrite_template_args_AST = (GrammarAST) astFactory
3843: .make((new ASTArray(2)).add(
3844: (GrammarAST) astFactory.create(ARGLIST,
3845: "ARGLIST")).add(
3846: rewrite_template_args_AST));
3847: currentAST.root = rewrite_template_args_AST;
3848: currentAST.child = rewrite_template_args_AST != null
3849: && rewrite_template_args_AST.getFirstChild() != null ? rewrite_template_args_AST
3850: .getFirstChild()
3851: : rewrite_template_args_AST;
3852: currentAST.advanceChildToEnd();
3853: rewrite_template_args_AST = (GrammarAST) currentAST.root;
3854: break;
3855: }
3856: case RPAREN: {
3857: rewrite_template_args_AST = (GrammarAST) currentAST.root;
3858: rewrite_template_args_AST = (GrammarAST) astFactory
3859: .create(ARGLIST, "ARGLIST");
3860: currentAST.root = rewrite_template_args_AST;
3861: currentAST.child = rewrite_template_args_AST != null
3862: && rewrite_template_args_AST.getFirstChild() != null ? rewrite_template_args_AST
3863: .getFirstChild()
3864: : rewrite_template_args_AST;
3865: currentAST.advanceChildToEnd();
3866: rewrite_template_args_AST = (GrammarAST) currentAST.root;
3867: break;
3868: }
3869: default: {
3870: throw new NoViableAltException(LT(1), getFilename());
3871: }
3872: }
3873: } catch (RecognitionException ex) {
3874: reportError(ex);
3875: recover(ex, _tokenSet_38);
3876: }
3877: returnAST = rewrite_template_args_AST;
3878: }
3879:
3880: public final void rewrite_template_arg()
3881: throws RecognitionException, TokenStreamException {
3882:
3883: returnAST = null;
3884: ASTPair currentAST = new ASTPair();
3885: GrammarAST rewrite_template_arg_AST = null;
3886: Token a = null;
3887: GrammarAST a_AST = null;
3888:
3889: try { // for error handling
3890: id();
3891: astFactory.addASTChild(currentAST, returnAST);
3892: a = LT(1);
3893: a_AST = (GrammarAST) astFactory.create(a);
3894: astFactory.makeASTRoot(currentAST, a_AST);
3895: match(ASSIGN);
3896: a_AST.setType(ARG);
3897: a_AST.setText("ARG");
3898: GrammarAST tmp97_AST = null;
3899: tmp97_AST = (GrammarAST) astFactory.create(LT(1));
3900: astFactory.addASTChild(currentAST, tmp97_AST);
3901: match(ACTION);
3902: rewrite_template_arg_AST = (GrammarAST) currentAST.root;
3903: } catch (RecognitionException ex) {
3904: reportError(ex);
3905: recover(ex, _tokenSet_39);
3906: }
3907: returnAST = rewrite_template_arg_AST;
3908: }
3909:
3910: public static final String[] _tokenNames = { "<0>", "EOF", "<2>",
3911: "NULL_TREE_LOOKAHEAD", "\"options\"", "\"tokens\"",
3912: "\"parser\"", "LEXER", "RULE", "BLOCK", "OPTIONAL",
3913: "CLOSURE", "POSITIVE_CLOSURE", "SYNPRED", "RANGE",
3914: "CHAR_RANGE", "EPSILON", "ALT", "EOR", "EOB", "EOA", "ID",
3915: "ARG", "ARGLIST", "RET", "LEXER_GRAMMAR", "PARSER_GRAMMAR",
3916: "TREE_GRAMMAR", "COMBINED_GRAMMAR", "INITACTION", "LABEL",
3917: "TEMPLATE", "\"scope\"", "GATED_SEMPRED", "SYN_SEMPRED",
3918: "BACKTRACK_SEMPRED", "\"fragment\"", "ACTION",
3919: "DOC_COMMENT", "SEMI", "\"lexer\"", "\"tree\"",
3920: "\"grammar\"", "AMPERSAND", "COLON", "RCURLY", "ASSIGN",
3921: "STRING_LITERAL", "CHAR_LITERAL", "INT", "STAR",
3922: "TOKEN_REF", "\"protected\"", "\"public\"", "\"private\"",
3923: "BANG", "ARG_ACTION", "\"returns\"", "\"throws\"", "COMMA",
3924: "LPAREN", "OR", "RPAREN", "\"catch\"", "\"finally\"",
3925: "PLUS_ASSIGN", "SEMPRED", "IMPLIES", "ROOT", "RULE_REF",
3926: "NOT", "TREE_BEGIN", "QUESTION", "PLUS", "WILDCARD",
3927: "REWRITE", "DOLLAR", "DOUBLE_QUOTE_STRING_LITERAL",
3928: "DOUBLE_ANGLE_STRING_LITERAL", "WS", "COMMENT",
3929: "SL_COMMENT", "ML_COMMENT", "OPEN_ELEMENT_OPTION",
3930: "CLOSE_ELEMENT_OPTION", "ESC", "DIGIT", "XDIGIT",
3931: "NESTED_ARG_ACTION", "NESTED_ACTION",
3932: "ACTION_CHAR_LITERAL", "ACTION_STRING_LITERAL",
3933: "ACTION_ESC", "WS_LOOP", "INTERNAL_RULE_REF", "WS_OPT",
3934: "SRC" };
3935:
3936: protected void buildTokenTypeASTClassMap() {
3937: tokenTypeToASTClassMap = null;
3938: }
3939:
3940: private static final long[] mk_tokenSet_0() {
3941: long[] data = { 2L, 0L };
3942: return data;
3943: }
3944:
3945: public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
3946:
3947: private static final long[] mk_tokenSet_1() {
3948: long[] data = { 2251799813685248L, 32L, 0L, 0L };
3949: return data;
3950: }
3951:
3952: public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
3953:
3954: private static final long[] mk_tokenSet_2() {
3955: long[] data = { 9191240600534384656L, 7074L, 0L, 0L };
3956: return data;
3957: }
3958:
3959: public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());
3960:
3961: private static final long[] mk_tokenSet_3() {
3962: long[] data = { 33803733376696352L, 32L, 0L, 0L };
3963: return data;
3964: }
3965:
3966: public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());
3967:
3968: private static final long[] mk_tokenSet_4() {
3969: long[] data = { 33786141190651904L, 32L, 0L, 0L };
3970: return data;
3971: }
3972:
3973: public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());
3974:
3975: private static final long[] mk_tokenSet_5() {
3976: long[] data = { 33786136895684608L, 32L, 0L, 0L };
3977: return data;
3978: }
3979:
3980: public static final BitSet _tokenSet_5 = new BitSet(mk_tokenSet_5());
3981:
3982: private static final long[] mk_tokenSet_6() {
3983: long[] data = { 33777340802662400L, 32L, 0L, 0L };
3984: return data;
3985: }
3986:
3987: public static final BitSet _tokenSet_6 = new BitSet(mk_tokenSet_6());
3988:
3989: private static final long[] mk_tokenSet_7() {
3990: long[] data = { 2252899325313088L, 32L, 0L, 0L };
3991: return data;
3992: }
3993:
3994: public static final BitSet _tokenSet_7 = new BitSet(mk_tokenSet_7());
3995:
3996: private static final long[] mk_tokenSet_8() {
3997: long[] data = { 17592186044416L, 0L };
3998: return data;
3999: }
4000:
4001: public static final BitSet _tokenSet_8 = new BitSet(mk_tokenSet_8());
4002:
4003: private static final long[] mk_tokenSet_9() {
4004: long[] data = { 549755813888L, 0L };
4005: return data;
4006: }
4007:
4008: public static final BitSet _tokenSet_9 = new BitSet(mk_tokenSet_9());
4009:
4010: private static final long[] mk_tokenSet_10() {
4011: long[] data = { 2286984185774080L, 0L };
4012: return data;
4013: }
4014:
4015: public static final BitSet _tokenSet_10 = new BitSet(
4016: mk_tokenSet_10());
4017:
4018: private static final long[] mk_tokenSet_11() {
4019: long[] data = { 33777340802662402L, 32L, 0L, 0L };
4020: return data;
4021: }
4022:
4023: public static final BitSet _tokenSet_11 = new BitSet(
4024: mk_tokenSet_11());
4025:
4026: private static final long[] mk_tokenSet_12() {
4027: long[] data = { 26392574033936L, 0L };
4028: return data;
4029: }
4030:
4031: public static final BitSet _tokenSet_12 = new BitSet(
4032: mk_tokenSet_12());
4033:
4034: private static final long[] mk_tokenSet_13() {
4035: long[] data = { 3461439213294059520L, 3300L, 0L, 0L };
4036: return data;
4037: }
4038:
4039: public static final BitSet _tokenSet_13 = new BitSet(
4040: mk_tokenSet_13());
4041:
4042: private static final long[] mk_tokenSet_14() {
4043: long[] data = { 26388279066624L, 0L };
4044: return data;
4045: }
4046:
4047: public static final BitSet _tokenSet_14 = new BitSet(
4048: mk_tokenSet_14());
4049:
4050: private static final long[] mk_tokenSet_15() {
4051: long[] data = { 8073124681965633536L, 3300L, 0L, 0L };
4052: return data;
4053: }
4054:
4055: public static final BitSet _tokenSet_15 = new BitSet(
4056: mk_tokenSet_15());
4057:
4058: private static final long[] mk_tokenSet_16() {
4059: long[] data = { 8182434279708442640L, 8190L, 0L, 0L };
4060: return data;
4061: }
4062:
4063: public static final BitSet _tokenSet_16 = new BitSet(
4064: mk_tokenSet_16());
4065:
4066: private static final long[] mk_tokenSet_17() {
4067: long[] data = { 8110279928647254016L, 4092L, 0L, 0L };
4068: return data;
4069: }
4070:
4071: public static final BitSet _tokenSet_17 = new BitSet(
4072: mk_tokenSet_17());
4073:
4074: private static final long[] mk_tokenSet_18() {
4075: long[] data = { 1155595654324551680L, 1252L, 0L, 0L };
4076: return data;
4077: }
4078:
4079: public static final BitSet _tokenSet_18 = new BitSet(
4080: mk_tokenSet_18());
4081:
4082: private static final long[] mk_tokenSet_19() {
4083: long[] data = { 6917529577396895744L, 2048L, 0L, 0L };
4084: return data;
4085: }
4086:
4087: public static final BitSet _tokenSet_19 = new BitSet(
4088: mk_tokenSet_19());
4089:
4090: private static final long[] mk_tokenSet_20() {
4091: long[] data = { 6917529577396895744L, 0L };
4092: return data;
4093: }
4094:
4095: public static final BitSet _tokenSet_20 = new BitSet(
4096: mk_tokenSet_20());
4097:
4098: private static final long[] mk_tokenSet_21() {
4099: long[] data = { 8073125231721447424L, 3300L, 0L, 0L };
4100: return data;
4101: }
4102:
4103: public static final BitSet _tokenSet_21 = new BitSet(
4104: mk_tokenSet_21());
4105:
4106: private static final long[] mk_tokenSet_22() {
4107: long[] data = { -9189594696052113406L, 33L, 0L, 0L };
4108: return data;
4109: }
4110:
4111: public static final BitSet _tokenSet_22 = new BitSet(
4112: mk_tokenSet_22());
4113:
4114: private static final long[] mk_tokenSet_23() {
4115: long[] data = { 2674012278751232L, 1120L, 0L, 0L };
4116: return data;
4117: }
4118:
4119: public static final BitSet _tokenSet_23 = new BitSet(
4120: mk_tokenSet_23());
4121:
4122: private static final long[] mk_tokenSet_24() {
4123: long[] data = { 8182337522685198336L, 4084L, 0L, 0L };
4124: return data;
4125: }
4126:
4127: public static final BitSet _tokenSet_24 = new BitSet(
4128: mk_tokenSet_24());
4129:
4130: private static final long[] mk_tokenSet_25() {
4131: long[] data = { 2674012278751232L, 1024L, 0L, 0L };
4132: return data;
4133: }
4134:
4135: public static final BitSet _tokenSet_25 = new BitSet(
4136: mk_tokenSet_25());
4137:
4138: private static final long[] mk_tokenSet_26() {
4139: long[] data = { 8182337522685181952L, 4084L, 0L, 0L };
4140: return data;
4141: }
4142:
4143: public static final BitSet _tokenSet_26 = new BitSet(
4144: mk_tokenSet_26());
4145:
4146: private static final long[] mk_tokenSet_27() {
4147: long[] data = { 8074251131628290048L, 4068L, 0L, 0L };
4148: return data;
4149: }
4150:
4151: public static final BitSet _tokenSet_27 = new BitSet(
4152: mk_tokenSet_27());
4153:
4154: private static final long[] mk_tokenSet_28() {
4155: long[] data = { 8073125231721447424L, 7396L, 0L, 0L };
4156: return data;
4157: }
4158:
4159: public static final BitSet _tokenSet_28 = new BitSet(
4160: mk_tokenSet_28());
4161:
4162: private static final long[] mk_tokenSet_29() {
4163: long[] data = { 8110279928647254016L, 4084L, 0L, 0L };
4164: return data;
4165: }
4166:
4167: public static final BitSet _tokenSet_29 = new BitSet(
4168: mk_tokenSet_29());
4169:
4170: private static final long[] mk_tokenSet_30() {
4171: long[] data = { 1155173441859485696L, 32L, 0L, 0L };
4172: return data;
4173: }
4174:
4175: public static final BitSet _tokenSet_30 = new BitSet(
4176: mk_tokenSet_30());
4177:
4178: private static final long[] mk_tokenSet_31() {
4179: long[] data = { 8070451219442696192L, 2048L, 0L, 0L };
4180: return data;
4181: }
4182:
4183: public static final BitSet _tokenSet_31 = new BitSet(
4184: mk_tokenSet_31());
4185:
4186: private static final long[] mk_tokenSet_32() {
4187: long[] data = { 1155595654324551680L, 4256L, 0L, 0L };
4188: return data;
4189: }
4190:
4191: public static final BitSet _tokenSet_32 = new BitSet(
4192: mk_tokenSet_32());
4193:
4194: private static final long[] mk_tokenSet_33() {
4195: long[] data = { 8146308725666217984L, 7072L, 0L, 0L };
4196: return data;
4197: }
4198:
4199: public static final BitSet _tokenSet_33 = new BitSet(
4200: mk_tokenSet_33());
4201:
4202: private static final long[] mk_tokenSet_34() {
4203: long[] data = { 1125899906842624L, 768L, 0L, 0L };
4204: return data;
4205: }
4206:
4207: public static final BitSet _tokenSet_34 = new BitSet(
4208: mk_tokenSet_34());
4209:
4210: private static final long[] mk_tokenSet_35() {
4211: long[] data = { 8073125231721447424L, 6304L, 0L, 0L };
4212: return data;
4213: }
4214:
4215: public static final BitSet _tokenSet_35 = new BitSet(
4216: mk_tokenSet_35());
4217:
4218: private static final long[] mk_tokenSet_36() {
4219: long[] data = { 8074251131628290048L, 7072L, 0L, 0L };
4220: return data;
4221: }
4222:
4223: public static final BitSet _tokenSet_36 = new BitSet(
4224: mk_tokenSet_36());
4225:
4226: private static final long[] mk_tokenSet_37() {
4227: long[] data = { 6917529577396895744L, 26624L, 0L, 0L };
4228: return data;
4229: }
4230:
4231: public static final BitSet _tokenSet_37 = new BitSet(
4232: mk_tokenSet_37());
4233:
4234: private static final long[] mk_tokenSet_38() {
4235: long[] data = { 4611686018427387904L, 0L };
4236: return data;
4237: }
4238:
4239: public static final BitSet _tokenSet_38 = new BitSet(
4240: mk_tokenSet_38());
4241:
4242: private static final long[] mk_tokenSet_39() {
4243: long[] data = { 5188146770730811392L, 0L };
4244: return data;
4245: }
4246:
4247: public static final BitSet _tokenSet_39 = new BitSet(
4248: mk_tokenSet_39());
4249:
4250: }