001: /*
002: * SQLTokenMarker.java - Generic SQL token marker
003: * Copyright (C) 1999 mike dillon
004: *
005: * You may use and modify this package for any purpose. Redistribution is
006: * permitted, in both source and binary form, provided that this notice
007: * remains intact in all source distributions of this package.
008: */
009: package workbench.gui.editor;
010:
011: import javax.swing.text.Segment;
012:
/**
 * Generic SQL token marker: scans one line of SQL text at a time and
 * classifies comments, string literals, operators and keywords for
 * syntax highlighting. Dialect subclasses supply the keyword table.
 *
 * @author mike dillon
 */
018: public class SQLTokenMarker extends TokenMarker {
019: private int offset, lastOffset, lastKeyword;
020: protected boolean isMySql = false;
021: protected KeywordMap keywords;
022: private char literalChar = 0;
023:
024: public SQLTokenMarker() {
025: }
026:
027: public char getPendingLiteralChar() {
028: return literalChar;
029: }
030:
031: private static byte getLiteralId(char literal) {
032: switch (literal) {
033: case '\'':
034: return Token.LITERAL1;
035: case '"':
036: case '[':
037: case ']':
038: return Token.LITERAL2;
039: }
040: return 0;
041: }
042:
043: public void markTokensImpl(Token lastToken, Segment line,
044: int lineIndex) {
045: byte token = Token.NULL;
046: this .literalChar = (lastToken == null ? 0 : lastToken
047: .getPendingLiteralChar());
048:
049: if (literalChar != 0) {
050: token = getLiteralId(literalChar);
051: }
052:
053: char[] array = line.array;
054: offset = lastOffset = lastKeyword = line.offset;
055: int currentLength = line.count + offset;
056:
057: loop: for (int i = offset; i < currentLength; i++) {
058: int i1 = i + 1;
059: switch (array[i]) {
060: case '*':
061: if (token == Token.COMMENT1 && currentLength - i >= 1
062: && array[i1] == '/') {
063: token = Token.NULL;
064: i++;
065: addToken(lineIndex, (i + 1) - lastOffset,
066: Token.COMMENT1);
067: lastOffset = i + 1;
068: } else if (token == Token.NULL) {
069: searchBack(lineIndex, line, i);
070: addToken(lineIndex, 1, Token.OPERATOR);
071: lastOffset = i + 1;
072: }
073: break;
074: case '[':
075: if (token == Token.NULL) {
076: searchBack(lineIndex, line, i);
077: token = Token.LITERAL2;
078: literalChar = '[';
079: lastOffset = i;
080: }
081: break;
082: case ']':
083: if (token == Token.LITERAL2 && literalChar == '[') {
084: token = Token.NULL;
085: literalChar = 0;
086: addToken(lineIndex, i1 - lastOffset, Token.LITERAL2);
087: lastOffset = i + 1;
088: }
089: break;
090: case '.':
091: case ',':
092: case '(':
093: case ')':
094: if (token == Token.NULL) {
095: searchBack(lineIndex, line, i);
096: addToken(lineIndex, 1, Token.NULL);
097: lastOffset = i + 1;
098: }
099: break;
100: case '+':
101: case '%':
102: case '&':
103: case '|':
104: case '^':
105: case '~':
106: case '<':
107: case '>':
108: case '=':
109: if (token == Token.NULL) {
110: searchBack(lineIndex, line, i);
111: addToken(lineIndex, 1, Token.OPERATOR);
112: lastOffset = i + 1;
113: }
114: break;
115: case ' ':
116: case '\t':
117: case ';':
118: if (token == Token.NULL) {
119: searchBack(lineIndex, line, i, false);
120: }
121: break;
122: case '/':
123: if (token == Token.NULL) {
124: if (currentLength - i >= 2 && array[i1] == '*') {
125: searchBack(lineIndex, line, i);
126: token = Token.COMMENT1;
127: lastOffset = i;
128: i++;
129: } else {
130: searchBack(lineIndex, line, i);
131: addToken(lineIndex, 1, Token.OPERATOR);
132: lastOffset = i + 1;
133: }
134: }
135: break;
136: case '-':
137: if (token == Token.NULL) {
138: if (currentLength - i >= 2 && array[i1] == '-') {
139: searchBack(lineIndex, line, i);
140: addToken(lineIndex, currentLength - i,
141: Token.COMMENT2);
142: lastOffset = currentLength;
143: break loop;
144: } else {
145: searchBack(lineIndex, line, i);
146: addToken(lineIndex, 1, Token.OPERATOR);
147: lastOffset = i + 1;
148: }
149: }
150: break;
151: case '#':
152: if (isMySql && token == Token.NULL) {
153: if (currentLength - i >= 1) {
154: searchBack(lineIndex, line, i);
155: addToken(lineIndex, currentLength - i,
156: Token.COMMENT2);
157: lastOffset = currentLength;
158: break loop;
159: }
160: }
161: break;
162: case '"':
163: case '\'':
164: if (token == Token.NULL) {
165: literalChar = array[i];
166: token = getLiteralId(literalChar);
167: addToken(lineIndex, i - lastOffset, Token.NULL);
168: lastOffset = i;
169: } else if (Token.isLiteral(token)
170: && literalChar == array[i]) {
171: token = Token.NULL;
172: addToken(lineIndex, (i + 1) - lastOffset,
173: getLiteralId(literalChar));
174: literalChar = 0;
175: lastOffset = i + 1;
176: }
177: break;
178: default:
179: break;
180: }
181: }
182:
183: if (token == Token.NULL) {
184: searchBack(lineIndex, line, currentLength, false);
185: }
186:
187: if (lastOffset != currentLength) {
188: addToken(lineIndex, currentLength - lastOffset, token);
189: }
190: }
191:
192: private void searchBack(int lineIndex, Segment line, int pos) {
193: searchBack(lineIndex, line, pos, true);
194: }
195:
196: private void searchBack(int lineIndex, Segment line, int pos,
197: boolean padNull) {
198: int len = pos - lastKeyword;
199: byte id = keywords.lookup(line, lastKeyword, len);
200:
201: if (id != Token.NULL) {
202: if (lastKeyword != lastOffset) {
203: addToken(lineIndex, lastKeyword - lastOffset,
204: Token.NULL);
205: }
206: addToken(lineIndex, len, id);
207: lastOffset = pos;
208: }
209:
210: lastKeyword = pos + 1;
211: if (padNull && lastOffset < pos) {
212: addToken(lineIndex, pos - lastOffset, Token.NULL);
213: }
214: }
215: }
|