// $ANTLR 2.7.5 (20050128): "oql-ojb.g" -> "OQLParser.java"$

/* Copyright 2003-2005 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ojb.odmg.oql;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Vector;

import org.apache.ojb.broker.metadata.ClassDescriptor;
import org.apache.ojb.broker.metadata.MetadataManager;
import org.apache.ojb.broker.metadata.ObjectReferenceDescriptor;
import org.apache.ojb.broker.query.Criteria;
import org.apache.ojb.broker.query.Query;
import org.apache.ojb.broker.query.QueryByCriteria;
import org.apache.ojb.broker.query.QueryFactory;
import org.apache.ojb.broker.util.ClassHelper;

import antlr.NoViableAltException;
import antlr.ParserSharedInputState;
import antlr.RecognitionException;
import antlr.Token;
import antlr.TokenBuffer;
import antlr.TokenStream;
import antlr.TokenStreamException;
import antlr.collections.impl.BitSet;
public class OQLParser extends antlr.LLkParser implements OQLLexerTokenTypes {

    protected OQLParser(TokenBuffer tokenBuf, int k) {
        super(tokenBuf, k);
        tokenNames = _tokenNames;
    }

    public OQLParser(TokenBuffer tokenBuf) {
        this(tokenBuf, 3);
    }

    protected OQLParser(TokenStream lexer, int k) {
        super(lexer, k);
        tokenNames = _tokenNames;
    }

    public OQLParser(TokenStream lexer) {
        this(lexer, 3);
    }

    public OQLParser(ParserSharedInputState state) {
        super(state, 3);
        tokenNames = _tokenNames;
    }

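    /**
     * Entry rule: parses one OQL query, optionally terminated by a
     * semicolon, and returns the resulting {@link Query} (null if
     * recovery from a parse error left no query).
     */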
    public final Query buildQuery() throws RecognitionException,
            TokenStreamException {
        Query query = null;

        try { // for error handling
            query = selectQuery();
            {
                if ((LA(1) == TOK_SEMIC)) {
                    match(TOK_SEMIC);
                } else if ((LA(1) == EOF)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_0);
        }
        return query;
    }

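    /**
     * Parses <code>select [distinct] &lt;projection&gt; from &lt;class&gt;
     * [where ...] [order by ...] [group by ...] [prefetch ...]</code>.
     * Depending on the projection, a plain query, an object-projection
     * query, or a report query is created.
     */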
    public final QueryByCriteria selectQuery()
            throws RecognitionException, TokenStreamException {
        QueryByCriteria query = null;

        try { // for error handling

            Class clazz = null;
            Criteria criteria = new Criteria();
            String[] projectionAttrs;
            boolean distinct = false;

            match(LITERAL_select);
            {
                if ((LA(1) == LITERAL_distinct)) {
                    match(LITERAL_distinct);
                    distinct = true;
                } else if ((LA(1) == TOK_STAR || LA(1) == Identifier)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            projectionAttrs = projectionAttributes();
            match(LITERAL_from);
            clazz = fromClause();
            {
                if ((LA(1) == LITERAL_where)) {
                    match(LITERAL_where);
                    whereClause(criteria);
                } else if ((_tokenSet_1.member(LA(1)))) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }

            if (clazz != null) {
                // A '*' projection leaves the single attribute null; both
                // that case and a plain alias (no path) yield an ordinary
                // query.
                if (projectionAttrs[0] == null
                        || projectionAttrs[0].indexOf('.') < 0) {
                    query = QueryFactory.newQuery(clazz, criteria, distinct);
                } else {
                    ClassDescriptor cld = MetadataManager.getInstance()
                            .getRepository().getDescriptorFor(clazz);
                    // Strip the leading alias ("a.b.c" -> "b.c") from each
                    // projected path.
                    for (int i = 0; i < projectionAttrs.length; i++) {
                        projectionAttrs[i] = projectionAttrs[i]
                                .substring(projectionAttrs[i].indexOf('.') + 1);
                    }

                    ArrayList descs = cld
                            .getAttributeDescriptorsForPath(projectionAttrs[0]);
                    int pathLen = descs.size();

                    // A path ending in a reference becomes an object
                    // projection; anything else becomes a report query.
                    if ((pathLen > 0)
                            && (descs.get(pathLen - 1) instanceof ObjectReferenceDescriptor)) {
                        ObjectReferenceDescriptor ord =
                                ((ObjectReferenceDescriptor) descs.get(pathLen - 1));
                        query = QueryFactory.newQuery(clazz, criteria, distinct);
                        query.setObjectProjectionAttribute(projectionAttrs[0],
                                ord.getItemClass());
                    } else {
                        query = QueryFactory.newReportQuery(clazz,
                                projectionAttrs, criteria, distinct);
                    }
                }
            }

            {
                if ((LA(1) == LITERAL_order)) {
                    match(LITERAL_order);
                    match(LITERAL_by);
                    orderClause(query);
                } else if ((_tokenSet_2.member(LA(1)))) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            {
                if ((LA(1) == LITERAL_group)) {
                    match(LITERAL_group);
                    match(LITERAL_by);
                    groupClause(query);
                } else if ((_tokenSet_3.member(LA(1)))) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            {
                if ((LA(1) == LITERAL_prefetch)) {
                    match(LITERAL_prefetch);
                    prefetchClause(query);
                } else if ((LA(1) == EOF || LA(1) == TOK_RPAREN || LA(1) == TOK_SEMIC)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_4);
        }
        return query;
    }

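    /**
     * Parses the projection: a comma-separated list of identifiers, or
     * <code>*</code> (which yields a single null element).
     */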
    public final String[] projectionAttributes()
            throws RecognitionException, TokenStreamException {
        String[] projectionAttrs = null;

        Token id = null;
        Token id1 = null;

        try { // for error handling

            String first = null;
            ArrayList list = null;

            {
                if ((LA(1) == Identifier)) {
                    id = LT(1);
                    match(Identifier);
                    {
                        first = id.getText();
                    }
                    {
                        _loop80: do {
                            if ((LA(1) == TOK_COMMA)) {
                                match(TOK_COMMA);
                                id1 = LT(1);
                                match(Identifier);
                                {
                                    if (list == null) {
                                        list = new ArrayList();
                                        list.add(first);
                                    }
                                    list.add(id1.getText());
                                }
                            } else {
                                break _loop80;
                            }
                        } while (true);
                    }
                } else if ((LA(1) == TOK_STAR)) {
                    match(TOK_STAR);
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }

            if (list == null) {
                projectionAttrs = new String[] { first };
            } else {
                projectionAttrs = (String[]) list.toArray(new String[list.size()]);
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_5);
        }
        return projectionAttrs;
    }

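    /**
     * Parses the fully qualified class name of the candidate class and
     * resolves it; returns null if the class cannot be loaded.
     */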
    public final Class fromClause() throws RecognitionException,
            TokenStreamException {
        Class clazz = null;

        Token id = null;

        try { // for error handling
            id = LT(1);
            match(Identifier);

            try {
                clazz = ClassHelper.getClass(id.getText());
            } catch (Exception e) {
                // class not found: clazz stays null and the caller
                // builds no query
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_6);
        }
        return clazz;
    }

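    /**
     * Parses the boolean expression of a <code>where</code> clause into
     * the given {@link Criteria}.
     */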
    public final void whereClause(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        try { // for error handling
            orExpr(criteria);
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

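    /** Parses a comma-separated list of sort criteria after <code>order by</code>. */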
    public final void orderClause(QueryByCriteria query)
            throws RecognitionException, TokenStreamException {

        try { // for error handling
            sortCriterion(query);
            {
                _loop83: do {
                    if ((LA(1) == TOK_COMMA)) {
                        match(TOK_COMMA);
                        sortCriterion(query);
                    } else {
                        break _loop83;
                    }
                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_2);
        }
    }

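    /** Parses a comma-separated list of group-by attributes after <code>group by</code>. */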
    public final void groupClause(QueryByCriteria query)
            throws RecognitionException, TokenStreamException {

        try { // for error handling
            groupCriterion(query);
            {
                _loop89: do {
                    if ((LA(1) == TOK_COMMA)) {
                        match(TOK_COMMA);
                        groupCriterion(query);
                    } else {
                        break _loop89;
                    }
                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_3);
        }
    }

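    /** Parses a comma-separated list of relationship names to prefetch. */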
    public final void prefetchClause(QueryByCriteria query)
            throws RecognitionException, TokenStreamException {

        try { // for error handling
            prefetchCriterion(query);
            {
                _loop93: do {
                    if ((LA(1) == TOK_COMMA)) {
                        match(TOK_COMMA);
                        prefetchCriterion(query);
                    } else {
                        break _loop93;
                    }
                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_4);
        }
    }

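    /**
     * Parses the sub-query form <code>exists &lt;projection&gt; in
     * &lt;class&gt; [: &lt;condition&gt;]</code> used inside (not) exists
     * expressions.
     */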
    public final Query existsQuery() throws RecognitionException,
            TokenStreamException {
        Query query = null;

        try { // for error handling

            Class clazz = null;
            Criteria criteria = new Criteria();

            match(LITERAL_exists);
            projectionAttributes();
            match(LITERAL_in);
            clazz = fromClause();
            {
                if ((LA(1) == TOK_COLON)) {
                    match(TOK_COLON);
                    whereClause(criteria);
                } else if ((_tokenSet_7.member(LA(1)))) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }

            if (clazz != null) {
                query = QueryFactory.newQuery(clazz, criteria);
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
        return query;
    }

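    /**
     * Parses <code>andExpr (or andExpr)*</code>; each further operand is
     * collected in its own {@link Criteria} and attached via
     * <code>addOrCriteria</code>.
     */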
    public final void orExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        try { // for error handling
            andExpr(criteria);
            {
                _loop97: do {
                    if ((LA(1) == LITERAL_or)
                            && (_tokenSet_8.member(LA(2)))
                            && (_tokenSet_9.member(LA(3)))) {
                        match(LITERAL_or);
                        Criteria orCriteria = new Criteria();
                        andExpr(orCriteria);
                        criteria.addOrCriteria(orCriteria);
                    } else {
                        break _loop97;
                    }
                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

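    /** Parses one sort criterion: <code>Identifier [asc | desc]</code>, ascending by default. */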
    public final void sortCriterion(QueryByCriteria query)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling
            boolean descending = false;
            id = LT(1);
            match(Identifier);
            {
                if ((LA(1) == LITERAL_asc || LA(1) == LITERAL_desc)) {
                    {
                        if ((LA(1) == LITERAL_asc)) {
                            match(LITERAL_asc);
                            descending = false;
                        } else if ((LA(1) == LITERAL_desc)) {
                            match(LITERAL_desc);
                            descending = true;
                        } else {
                            throw new NoViableAltException(LT(1), getFilename());
                        }
                    }
                } else if ((_tokenSet_10.member(LA(1)))) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }

            if (descending) {
                query.addOrderByDescending(id.getText());
            } else {
                query.addOrderByAscending(id.getText());
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_10);
        }
    }

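    /** Parses one group-by attribute and adds it to the query. */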
    public final void groupCriterion(QueryByCriteria query)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling
            id = LT(1);
            match(Identifier);

            query.addGroupBy(id.getText());

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_11);
        }
    }

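    /** Parses one relationship name and registers it for prefetching. */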
    public final void prefetchCriterion(QueryByCriteria query)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling
            id = LT(1);
            match(Identifier);

            query.addPrefetchedRelationship(id.getText());

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_12);
        }
    }

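    /**
     * Parses <code>quantifierExpr (and quantifierExpr)*</code>; each
     * further operand is attached via <code>addAndCriteria</code>.
     */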
    public final void andExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        try { // for error handling
            quantifierExpr(criteria);
            {
                _loop100: do {
                    if ((LA(1) == LITERAL_and)
                            && (_tokenSet_8.member(LA(2)))
                            && (_tokenSet_9.member(LA(3)))) {
                        match(LITERAL_and);
                        Criteria andCriteria = new Criteria();
                        quantifierExpr(andCriteria);
                        criteria.addAndCriteria(andCriteria);
                    } else {
                        break _loop100;
                    }
                } while (true);
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

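    /**
     * Dispatches on the lookahead to a parenthesized expression, an
     * is_defined/is_undefined test, a (not) exists expression, or a
     * comparison, like, between, or in expression.
     */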
    public final void quantifierExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        try { // for error handling
            switch (LA(1)) {
                case TOK_LPAREN: {
                    match(TOK_LPAREN);
                    orExpr(criteria);
                    match(TOK_RPAREN);
                    break;
                }
                case LITERAL_is_undefined:
                case LITERAL_is_defined: {
                    undefinedExpr(criteria);
                    break;
                }
                case LITERAL_exists:
                case LITERAL_not: {
                    existsExpr(criteria);
                    break;
                }
                default:
                    if ((LA(1) == Identifier)
                            && (_tokenSet_13.member(LA(2)))) {
                        equalityExpr(criteria);
                    } else if ((LA(1) == Identifier)
                            && (LA(2) == LITERAL_not || LA(2) == LITERAL_like)
                            && (_tokenSet_14.member(LA(3)))) {
                        likeExpr(criteria);
                    } else if ((LA(1) == Identifier)
                            && (LA(2) == LITERAL_not || LA(2) == LITERAL_between)
                            && (_tokenSet_15.member(LA(3)))) {
                        betweenExpr(criteria);
                    } else if ((LA(1) == Identifier)
                            && (LA(2) == LITERAL_in || LA(2) == LITERAL_not)
                            && (LA(3) == TOK_LPAREN || LA(3) == LITERAL_in || LA(3) == LITERAL_list)) {
                        inExpr(criteria);
                    } else {
                        throw new NoViableAltException(LT(1), getFilename());
                    }
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

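    /**
     * Parses <code>Identifier op literal</code> where op is one of
     * <code>=, !=, &lt;&gt;, &lt;, &gt;, &lt;=, &gt;=</code>; comparing
     * against <code>nil</code> maps to an is-null / not-null criterion.
     */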
    public final void equalityExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling

            Object value = null;

            id = LT(1);
            match(Identifier);
            {
                {
                    switch (LA(1)) {
                        case TOK_EQ: {
                            match(TOK_EQ);
                            {
                                if ((LA(1) == LITERAL_nil)) {
                                    match(LITERAL_nil);
                                    criteria.addIsNull(id.getText());
                                } else if ((_tokenSet_16.member(LA(1)))) {
                                    value = literal();
                                    criteria.addEqualTo(id.getText(), value);
                                } else {
                                    throw new NoViableAltException(LT(1), getFilename());
                                }
                            }
                            break;
                        }
                        case TOK_NE: {
                            match(TOK_NE);
                            {
                                if ((LA(1) == LITERAL_nil)) {
                                    match(LITERAL_nil);
                                    criteria.addNotNull(id.getText());
                                } else if ((_tokenSet_16.member(LA(1)))) {
                                    value = literal();
                                    criteria.addNotEqualTo(id.getText(), value);
                                } else {
                                    throw new NoViableAltException(LT(1), getFilename());
                                }
                            }
                            break;
                        }
                        case TOK_NE2: {
                            match(TOK_NE2);
                            {
                                if ((LA(1) == LITERAL_nil)) {
                                    match(LITERAL_nil);
                                    criteria.addNotNull(id.getText());
                                } else if ((_tokenSet_16.member(LA(1)))) {
                                    value = literal();
                                    criteria.addNotEqualTo(id.getText(), value);
                                } else {
                                    throw new NoViableAltException(LT(1), getFilename());
                                }
                            }
                            break;
                        }
                        case TOK_LT: {
                            match(TOK_LT);
                            value = literal();
                            criteria.addLessThan(id.getText(), value);
                            break;
                        }
                        case TOK_GT: {
                            match(TOK_GT);
                            value = literal();
                            criteria.addGreaterThan(id.getText(), value);
                            break;
                        }
                        case TOK_LE: {
                            match(TOK_LE);
                            value = literal();
                            criteria.addLessOrEqualThan(id.getText(), value);
                            break;
                        }
                        case TOK_GE: {
                            match(TOK_GE);
                            value = literal();
                            criteria.addGreaterOrEqualThan(id.getText(), value);
                            break;
                        }
                        default: {
                            throw new NoViableAltException(LT(1), getFilename());
                        }
                    }
                }
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

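    /** Parses <code>Identifier [not] like literal</code>. */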
    public final void likeExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling

            boolean negative = false;
            Object value = null;

            id = LT(1);
            match(Identifier);
            {
                if ((LA(1) == LITERAL_not)) {
                    match(LITERAL_not);
                    negative = true;
                } else if ((LA(1) == LITERAL_like)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            match(LITERAL_like);
            value = literal();

            if (negative) {
                criteria.addNotLike(id.getText(), value);
            } else {
                criteria.addLike(id.getText(), value);
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

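    /** Parses <code>is_undefined(attr)</code> / <code>is_defined(attr)</code>, mapped to is-null / not-null. */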
    public final void undefinedExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling
            boolean negative = false;
            {
                if ((LA(1) == LITERAL_is_undefined)) {
                    match(LITERAL_is_undefined);
                    negative = false;
                } else if ((LA(1) == LITERAL_is_defined)) {
                    match(LITERAL_is_defined);
                    negative = true;
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            match(TOK_LPAREN);
            id = LT(1);
            match(Identifier);
            match(TOK_RPAREN);

            if (negative) {
                criteria.addNotNull(id.getText());
            } else {
                criteria.addIsNull(id.getText());
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

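    /** Parses <code>Identifier [not] between literal and literal</code>. */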
    public final void betweenExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling

            boolean negative = false;
            Object lower = null;
            Object upper = null;

            id = LT(1);
            match(Identifier);
            {
                if ((LA(1) == LITERAL_not)) {
                    match(LITERAL_not);
                    negative = true;
                } else if ((LA(1) == LITERAL_between)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            match(LITERAL_between);
            lower = literal();
            match(LITERAL_and);
            upper = literal();

            if (negative) {
                criteria.addNotBetween(id.getText(), lower, upper);
            } else {
                criteria.addBetween(id.getText(), lower, upper);
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

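    /** Parses <code>Identifier [not] in [list] (literal, ...)</code>. */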
    public final void inExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        Token id = null;

        try { // for error handling

            boolean negative = false;
            Collection coll;

            id = LT(1);
            match(Identifier);
            {
                if ((LA(1) == LITERAL_not)) {
                    match(LITERAL_not);
                    negative = true;
                } else if ((LA(1) == LITERAL_in)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            match(LITERAL_in);
            {
                if ((LA(1) == LITERAL_list)) {
                    match(LITERAL_list);
                } else if ((LA(1) == TOK_LPAREN)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            coll = argList();

            if (negative) {
                criteria.addNotIn(id.getText(), coll);
            } else {
                criteria.addIn(id.getText(), coll);
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

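    /**
     * Parses <code>[not] exists (selectQuery)</code> or the short
     * <code>exists ... in ...</code> form, adding a (not) exists
     * criterion with the sub-query.
     */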
    public final void existsExpr(Criteria criteria)
            throws RecognitionException, TokenStreamException {

        try { // for error handling

            Query subQuery = null;
            boolean negative = false;

            {
                if ((LA(1) == LITERAL_not)) {
                    match(LITERAL_not);
                    negative = true;
                } else if ((LA(1) == LITERAL_exists)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            {
                if ((LA(1) == LITERAL_exists) && (LA(2) == TOK_LPAREN)) {
                    match(LITERAL_exists);
                    match(TOK_LPAREN);
                    subQuery = selectQuery();
                    match(TOK_RPAREN);
                } else if ((LA(1) == LITERAL_exists)
                        && (LA(2) == TOK_STAR || LA(2) == Identifier)) {
                    subQuery = existsQuery();
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }

            if (negative) {
                criteria.addNotExists(subQuery);
            } else {
                criteria.addExists(subQuery);
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
    }

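    /**
     * Parses a single literal into a Java value: booleans, integers
     * (falling back to long on overflow), doubles, characters, strings,
     * date/time/timestamp literals, or a positional parameter
     * <code>$n</code> whose value is bound later (parsed as null here).
     */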
    public final Object literal() throws RecognitionException,
            TokenStreamException {
        Object value = null;

        Token tokInt = null;
        Token tokADbl = null;
        Token tokEDbl = null;
        Token tokChar = null;
        Token tokStr = null;
        Token tokDate = null;
        Token tokTime = null;
        Token tokTs = null;

        try { // for error handling
            switch (LA(1)) {
                case TOK_DOLLAR: {
                    match(TOK_DOLLAR);
                    match(TOK_UNSIGNED_INTEGER);
                    // positional query parameter ($n); its value is bound
                    // later, so there is no literal value here
                    value = null;
                    break;
                }
                case LITERAL_true: {
                    match(LITERAL_true);
                    value = Boolean.TRUE;
                    break;
                }
                case LITERAL_false: {
                    match(LITERAL_false);
                    value = Boolean.FALSE;
                    break;
                }
                case TOK_UNSIGNED_INTEGER: {
                    tokInt = LT(1);
                    match(TOK_UNSIGNED_INTEGER);

                    try {
                        value = Integer.valueOf(tokInt.getText());
                    } catch (NumberFormatException ignored) {
                        // too large for an int, fall back to long
                        value = Long.valueOf(tokInt.getText());
                    }

                    break;
                }
                case TOK_APPROXIMATE_NUMERIC_LITERAL: {
                    tokADbl = LT(1);
                    match(TOK_APPROXIMATE_NUMERIC_LITERAL);
                    value = Double.valueOf(tokADbl.getText());
                    break;
                }
                case TOK_EXACT_NUMERIC_LITERAL: {
                    tokEDbl = LT(1);
                    match(TOK_EXACT_NUMERIC_LITERAL);
                    value = Double.valueOf(tokEDbl.getText());
                    break;
                }
                case CharLiteral: {
                    tokChar = LT(1);
                    match(CharLiteral);
                    value = new Character(tokChar.getText().charAt(0));
                    break;
                }
                case StringLiteral: {
                    tokStr = LT(1);
                    match(StringLiteral);
                    value = tokStr.getText();
                    break;
                }
                case LITERAL_date: {
                    match(LITERAL_date);
                    tokDate = LT(1);
                    match(StringLiteral);
                    value = java.sql.Date.valueOf(tokDate.getText());
                    break;
                }
                case LITERAL_time: {
                    match(LITERAL_time);
                    tokTime = LT(1);
                    match(StringLiteral);
                    value = java.sql.Time.valueOf(tokTime.getText());
                    break;
                }
                case LITERAL_timestamp: {
                    match(LITERAL_timestamp);
                    tokTs = LT(1);
                    match(StringLiteral);
                    value = java.sql.Timestamp.valueOf(tokTs.getText());
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_17);
        }
        return value;
    }

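    /**
     * Parses a parenthesized, comma-separated list of literals; returns
     * null when the list is empty.
     */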
    public final Collection argList() throws RecognitionException,
            TokenStreamException {
        Collection coll = null;

        try { // for error handling

            Collection temp = new Vector();
            Object val;

            match(TOK_LPAREN);
            {
                if ((_tokenSet_16.member(LA(1)))) {
                    val = literal();
                    if (val != null) {
                        temp.add(val);
                    }
                    {
                        _loop124: do {
                            if ((LA(1) == TOK_COMMA)) {
                                match(TOK_COMMA);
                                val = literal();
                                if (val != null) {
                                    temp.add(val);
                                }
                            } else {
                                break _loop124;
                            }
                        } while (true);
                    }
                } else if ((LA(1) == TOK_RPAREN)) {
                } else {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
            match(TOK_RPAREN);

            if (!temp.isEmpty()) {
                coll = temp;
            }

        } catch (RecognitionException ex) {
            reportError(ex);
            recover(ex, _tokenSet_7);
        }
        return coll;
    }

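    /** Human-readable token names used in parser error messages. */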
    public static final String[] _tokenNames = { "<0>", "EOF", "<2>",
            "NULL_TREE_LOOKAHEAD", "right parenthesis", "left parenthesis",
            "comma", "semicolon", "TOK_COLON", "dot", "dot", "operator",
            "comparison operator", "operator", "operator", "operator",
            "operator", "comparison operator", "comparison operator",
            "comparison operator", "comparison operator",
            "comparison operator", "comparison operator", "left bracket",
            "right bracket", "TOK_DOLLAR", "NameFirstCharacter",
            "NameCharacter", "Identifier", "TOK_UNSIGNED_INTEGER",
            "TOK_APPROXIMATE_NUMERIC_LITERAL", "numeric value",
            "character string", "character string", "WhiteSpace", "NewLine",
            "CommentLine", "MultiLineComment", "\"select\"", "\"distinct\"",
            "\"from\"", "\"where\"", "\"order\"", "\"by\"", "\"group\"",
            "\"prefetch\"", "\"exists\"", "\"in\"", "\"asc\"", "\"desc\"",
            "\"or\"", "\"and\"", "\"nil\"", "\"not\"", "\"list\"",
            "\"between\"", "\"is_undefined\"", "\"is_defined\"", "\"like\"",
            "\"true\"", "\"false\"", "\"date\"", "\"time\"",
            "\"timestamp\"" };

    // Generated bitmaps of token types used as follow sets for error
    // recovery; bit i corresponds to token type i.
    private static final long[] mk_tokenSet_0() {
        long[] data = { 2L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());

    private static final long[] mk_tokenSet_1() {
        long[] data = { 57174604644498L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());

    private static final long[] mk_tokenSet_2() {
        long[] data = { 52776558133394L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());

    private static final long[] mk_tokenSet_3() {
        long[] data = { 35184372088978L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());

    private static final long[] mk_tokenSet_4() {
        long[] data = { 146L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());

    private static final long[] mk_tokenSet_5() {
        long[] data = { 141836999983104L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_5 = new BitSet(mk_tokenSet_5());

    private static final long[] mk_tokenSet_6() {
        long[] data = { 3437073348428178L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_6 = new BitSet(mk_tokenSet_6());

    private static final long[] mk_tokenSet_7() {
        long[] data = { 3434874325172370L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_7 = new BitSet(mk_tokenSet_7());

    private static final long[] mk_tokenSet_8() {
        long[] data = { 225250350381137952L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_8 = new BitSet(mk_tokenSet_8());

    private static final long[] mk_tokenSet_9() {
        long[] data = { 549650261048496160L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_9 = new BitSet(mk_tokenSet_9());

    private static final long[] mk_tokenSet_10() {
        long[] data = { 52776558133458L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_10 = new BitSet(mk_tokenSet_10());

    private static final long[] mk_tokenSet_11() {
        long[] data = { 35184372089042L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_11 = new BitSet(mk_tokenSet_11());

    private static final long[] mk_tokenSet_12() {
        long[] data = { 210L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_12 = new BitSet(mk_tokenSet_12());

    private static final long[] mk_tokenSet_13() {
        long[] data = { 8261632L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_13 = new BitSet(mk_tokenSet_13());

    private static final long[] mk_tokenSet_14() {
        long[] data = { -288230359475159040L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_14 = new BitSet(mk_tokenSet_14());

    private static final long[] mk_tokenSet_15() {
        long[] data = { -540431938607906816L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_15 = new BitSet(mk_tokenSet_15());

    private static final long[] mk_tokenSet_16() {
        long[] data = { -576460735626870784L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_16 = new BitSet(mk_tokenSet_16());

    private static final long[] mk_tokenSet_17() {
        long[] data = { 3434874325172434L, 0L };
        return data;
    }

    public static final BitSet _tokenSet_17 = new BitSet(mk_tokenSet_17());

}