// RuleParser.java
002: // $Id: RuleParser.java,v 1.5 2000/08/16 21:38:05 ylafon Exp $
003: // (c) COPYRIGHT MIT and INRIA, 1998.
004: // Please first read the full copyright statement in file COPYRIGHT.html
005:
006: package org.w3c.www.protocol.http.proxy;
007:
008: import java.util.StringTokenizer;
009: import java.util.Vector;
010:
011: import java.io.BufferedReader;
012: import java.io.IOException;
013: import java.io.InputStream;
014: import java.io.InputStreamReader;
015: import java.io.StreamTokenizer;
016:
017: /**
018: * A simple Rule parser.
019: */
020:
021: public class RuleParser {
022:
023: InputStream in = null;
024: RuleNode root = null;
025:
026: /**
027: * Add a mapping for the given rule in our rule node.
028: * @param lhs The rule left hand side, as a parsed String array.
029: * @param rule The mapped rule instance.
030: */
031:
032: protected void addRule(String lhs[], Rule rule) {
033: RuleNode node = root;
034: int lhslen = lhs.length;
035: if (!lhs[0].equals("default")) {
036: for (int i = lhslen; --i >= 0;) {
037: RuleNode child = node.lookup(lhs[i]);
038: if (child == null)
039: child = node.addChild(lhs[i]);
040: node = child;
041: }
042: }
043: node.setRule(rule);
044: }
045:
046: /**
047: * Create a suitable rule mapping for the tokenized rule.
048: * @param tokens The rule tokens, as a String array.
049: * @param toklen Number of tokens in above array.
050: * @exception RuleParserException if parsing failed.
051: */
052:
053: protected void parseRule(String tokens[], int toklen)
054: throws RuleParserException {
055: // Get and parse the rule left hand side first:
056: StringTokenizer st = new StringTokenizer((String) tokens[0],
057: ".");
058: Vector vlhs = new Vector();
059: String vls;
060: boolean isnum = false;
061: while (st.hasMoreTokens()) {
062: isnum = true;
063: vls = st.nextToken();
064: for (int i = 0; isnum && (i < vls.length()); i++)
065: isnum = (vls.charAt(i) >= '0')
066: && (vls.charAt(i) <= '9');
067: vlhs.addElement(vls);
068: }
069: // if numeric, reverse the order of tokens
070: if (isnum) {
071: int vs = vlhs.size();
072: for (int i = 0; i < vs; i++) {
073: vlhs.addElement(vlhs.elementAt(vs - i - 1));
074: vlhs.removeElementAt(vs - i - 1);
075: }
076: }
077: String slhs[] = new String[vlhs.size()];
078: vlhs.copyInto(slhs);
079: // Build a rule instance:
080: Rule rule = Rule.createRule(tokens, 1, toklen);
081: // Install the rule in our root node:
082: addRule(slhs, rule);
083: }
084:
085: /**
086: * Parse the our input stream into a RuleNode instance.
087: * @exception IOException If reading the rule input stream failed.
088: * @exception RuleParserException If some invalid rule syntax was
089: * detected.
090: */
091:
092: public RuleNode parse() throws RuleParserException, IOException {
093: // Initialize the stream tokenizer:
094: boolean eof = false;
095: BufferedReader br = new BufferedReader(
096: new InputStreamReader(in));
097: StreamTokenizer st = new StreamTokenizer(br);
098: // do syntax by hand
099: st.resetSyntax();
100: st.wordChars('a', 'z');
101: st.wordChars('A', 'Z');
102: st.wordChars('0', '9');
103: st.wordChars(128 + 32, 255);
104: st.whitespaceChars(0, ' ');
105: st.wordChars(33, 128);
106: st.commentChar('#');
107: st.eolIsSignificant(true);
108: st.lowerCaseMode(true);
109: // Create the root node, to be returned:
110: root = new RuleNode();
111: String tokens[] = new String[32];
112: int toklen = 0;
113: // Parse input:
114: while (!eof) {
115: // Read one line of input, parse it:
116: while (!eof) {
117: int tt = -1;
118: switch (tt = st.nextToken()) {
119: case StreamTokenizer.TT_EOF:
120: eof = true;
121: if (toklen > 0) {
122: try {
123: parseRule(tokens, toklen);
124: } catch (RuleParserException ex) {
125: String msg = ("Error while parsing rule file, "
126: + "line " + st.lineno() + ": " + ex
127: .getMessage());
128: throw new RuleParserException(msg);
129: }
130: toklen = 0;
131: }
132: break;
133: case StreamTokenizer.TT_EOL:
134: if (toklen > 0) {
135: try {
136: parseRule(tokens, toklen);
137: } catch (RuleParserException ex) {
138: String msg = ("Error while parsing rule file, "
139: + "line " + st.lineno() + ": " + ex
140: .getMessage());
141: throw new RuleParserException(msg);
142: }
143: toklen = 0;
144: }
145: break;
146: case StreamTokenizer.TT_WORD:
147: // Add that token:
148: if (toklen + 1 >= tokens.length) {
149: String newtok[] = new String[tokens.length + 8];
150: System.arraycopy(tokens, 0, newtok, 0, toklen);
151: tokens = newtok;
152: }
153: tokens[toklen++] = st.sval;
154: break;
155: default:
156: throw new RuleParserException(
157: "Invalid syntax, line " + st.lineno() + ".");
158: }
159: }
160: }
161: return root;
162: }
163:
164: /**
165: * Create a rule parser to parse the given input stream.
166: */
167:
168: public RuleParser(InputStream in) {
169: this.in = in;
170: }
171:
172: }
|