/*
 * TestMultithreadTokenizer.java: JUnit test for Tokenizer implementations
 *
 * Copyright (C) 2002 Heiko Blau
 *
 * This file belongs to the JTopas test suite.
 * The JTopas test suite is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by the
 * Free Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This software is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License along
 * with the JTopas test suite. If not, write to the
 *
 *   Free Software Foundation, Inc.
 *   59 Temple Place, Suite 330,
 *   Boston, MA 02111-1307
 *   USA
 *
 * or check the Internet: http://www.fsf.org
 *
 * The JTopas test suite uses the test framework JUnit by Kent Beck and Erich Gamma.
 * You should have received a copy of their JUnit license agreement along with
 * the JTopas test suite.
 *
 * We do NOT provide the JUnit archive junit.jar necessary to compile and run
 * our tests, since we assume that you either have it already or would like
 * to obtain the current release yourself.
 * Please visit either:
 *   http://sourceforge.net/projects/junit
 * or
 *   http://junit.org
 * to obtain JUnit.
 *
 * Contact:
 *   email: heiko@susebox.de
 */

package de.susebox.jtopas;

//-----------------------------------------------------------------------------
// Imports
//
import java.util.Random;
import java.util.ArrayList;

import java.io.StringReader;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import de.susebox.TestUtilities;

//-----------------------------------------------------------------------------
// Class TestMultithreadTokenizer
//

/**<p>
 * This class tests {@link Tokenizer} implementations in a multithreaded environment.
 *</p>
 *
 * @see Tokenizer
 * @author Heiko Blau
 */
public class TestMultithreadTokenizer extends TestCase {

  //---------------------------------------------------------------------------
  // main method
  //

  /**
   * Call this method to invoke the tests.
   */
  public static void main(String[] args) {
    String[] tests = { TestMultithreadTokenizer.class.getName() };

    TestUtilities.run(tests, args);
  }
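
  // A minimal sketch of invoking this suite directly with the JUnit 3.x text
  // runner instead of the main method above (assuming junit.jar is on the
  // classpath):
  //
  //   junit.textui.TestRunner.run(TestMultithreadTokenizer.suite());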

  //---------------------------------------------------------------------------
  // suite method
  //

  /**
   * Implementation of the JUnit method <code>suite</code>. For each set of test
   * properties one or more tests are instantiated.
   *
   * @return a test suite
   */
  public static Test suite() {
    TestSuite suite = new TestSuite(TestMultithreadTokenizer.class.getName());

    suite.addTest(new TestMultithreadTokenizer("testParallelParsing"));
    return suite;
  }

  //---------------------------------------------------------------------------
  // Constructor
  //

  /**
   * Constructor taking the name of the test method to run.
   *
   * @param test name of the test method to invoke
   */
  public TestMultithreadTokenizer(String test) {
    super(test);
  }

  //---------------------------------------------------------------------------
  // Fixture setup and release
  //

  /**
   * Sets up the fixture: creates the shared {@link TokenizerProperties} instance
   * and registers the test properties with it.
   * This method is called before a test is executed.
   */
  protected void setUp() throws Exception {
    _properties = new StandardTokenizerProperties();
    for (int index = 0; index < _testProperties.length; ++index) {
      _properties.addProperty(_testProperties[index]);
    }
  }
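
  // Note: testParallelParsing replaces this instance with one created with
  // Flags.F_RETURN_WHITESPACES, so the instance built here is effectively a
  // default for other potential test cases.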

  /**
   * Tears down the fixture: releases the shared {@link TokenizerProperties}
   * instance. This method is called after a test is executed.
   */
  protected void tearDown() throws Exception {
    _properties = null;
  }

  //---------------------------------------------------------------------------
  // test cases
  //

  /**
   * Tests parallel parsing: several threads tokenize known texts concurrently,
   * sharing one {@link TokenizerProperties} instance, and verify the returned
   * token types against the expected results.
   */
  public void testParallelParsing() throws Throwable {
    Random random = new Random();
    StringBuffer[] active = new StringBuffer[_numberOfThreads];
    String[] last = new String[_numberOfThreads];
    Runner[] runner = new Runner[_numberOfThreads];
    Thread[] thread = new Thread[_numberOfThreads];
    long start = System.currentTimeMillis();

    // Create TokenizerProperties
    _properties = new StandardTokenizerProperties(Flags.F_RETURN_WHITESPACES);

    for (int index = 0; index < _testProperties.length; ++index) {
      _properties.addProperty(_testProperties[index]);
    }
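
    // Each runner reports progress through its own StringBuffer: the runner
    // rewrites the buffer with its iteration count while the main thread polls
    // it. A StringBuffer is used rather than a plain String field, presumably
    // because its methods are synchronized.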

    // Create resources
    for (int index = 0; index < _numberOfThreads; ++index) {
      active[index] = new StringBuffer("0");
      runner[index] = new Runner(this, random.nextInt(_testTexts.length), active[index]);
      thread[index] = new Thread(runner[index]);
      thread[index].setDaemon(true);
    }

    // start threads and check actions
    try {
      for (int index = 0; index < _numberOfThreads; ++index) {
        thread[index].start();
      }

      while (System.currentTimeMillis() - start < _duration * 1000) {
        // poll the activity counters every few seconds
        synchronized (this) {
          try {
            wait(3000);
          } catch (InterruptedException ex) {
          }
        }

        // print activity
        for (int index = 0; index < _numberOfThreads; ++index) {
          System.out.println(System.currentTimeMillis()
              + ": Activity at runner " + index + ": " + active[index]);
          last[index] = active[index].toString();
        }
      }

      // stop the threads
      for (int index = 0; index < _numberOfThreads; ++index) {
        runner[index].stop();
      }
      Thread.sleep(1000);
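
      // The threshold below is a heuristic. With integer arithmetic and the
      // default settings, (50 / _numberOfThreads) * _duration evaluates to
      // (50 / 30) * 60 = 1 * 60 = 60, i.e. each runner must complete roughly
      // one parse pass per second of test duration.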

      // check activity
      for (int index = 0; index < _numberOfThreads; ++index) {
        String activity = active[index].toString();

        assertTrue(
            "Too little activity at runner " + index + ": " + activity,
            Integer.parseInt(activity) > (50 / _numberOfThreads) * _duration);
      }

    } finally {
      for (int index = 0; index < _numberOfThreads; ++index) {
        thread[index] = null;
      }
    }
  }

  //---------------------------------------------------------------------------
  // class members
  //
  private static int _numberOfThreads = 30;   // number of parallel runner threads
  private static int _duration = 60;          // test duration in seconds

  /**
   * Table with properties
   */
  private static final TokenizerProperty[] _testProperties = {
    new TokenizerProperty(Token.STRING, new String[] { "\"", "\"", "\\" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.STRING, new String[] { "'", "'", "\\" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.LINE_COMMENT, new String[] { "//" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.BLOCK_COMMENT, new String[] { "/*", "*/" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.BLOCK_COMMENT, new String[] { "/**", "*/" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "if" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "else" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "return" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "native" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "for" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "while" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "do" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "switch" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "case" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "default" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "break" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "class" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "interface" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "synchronized" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "public" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "protected" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "private" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "final" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "static" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "implements" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "extends" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "byte" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "char" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "int" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "long" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "double" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "String" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "boolean" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "void" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "throw" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "throws" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "new" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "assert" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "try" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "catch" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "finally" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "import" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "package" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "this" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "super" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "null" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "," }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ";" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "==" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "!=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">>=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<<=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "+=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "-=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "*=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "/=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "&=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "|=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<<" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">>" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">>>" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "++" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "--" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "~" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "*" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "/" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "%" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "^" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "+" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "-" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "." }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "(" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ")" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "{" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "}" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "[" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "]" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "?" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ":" }, null, 0, Flags.F_NO_CASE) };
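
  // Note: every property above carries Flags.F_NO_CASE, so keywords and
  // special sequences are registered case-insensitively; "IF" would match
  // the "if" keyword as well.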

  /**
   * Table with texts to parse
   */
  private static final String[] _testTexts = {
      "import junit.framework.TestCase;\n"
    + "\n"
    + "public class MyTest extends TestCase implements TestListener {\n"
    + "/** default constructor */\n"
    + "public MyTest() {\n"
    + "this(null);\n"
    + "}\n"
    + "\n"
    + "/**\n"
    + " * constructor taking name\n"
    + " *\n"
    + " * @param name the name\n"
    + " */\n"
    + "public MyTest(String name) {\n"
    + "setName(name);\n"
    + "}\n"
    + "\n"
    + "/**\n"
    + " * Getting the name\n"
    + " *\n"
    + " * @return the name\n"
    + " */\n"
    + "public String getName() {\n"
    + "return _myName;\n"
    + "}\n"
    + "\n"
    + "/**\n"
    + " * Setting a new name\n"
    + " *\n"
    + " * @param name the new name\n"
    + " * @return the old name or <code>null</code>\n"
    + " */\n"
    + "public String setName(String name) {\n"
    + "// setting _myName safely to a non-null value\n"
    + "_myName = (name != null) ? name : \"\";\n"
    + "}\n"
    + "\n"
    + "// Members\n"
    + "private String _myName = null;\n"
    + "}\n",

    // second text
      "// package declaration\r\n"
    + "package my.domain.toppackage.subpackage;\r\n"
    + "\r\n"
    + "// imports\r\n"
    + "import java.applet.Applet;\r\n"
    + "import java.util.ArrayList;\r\n"
    + "import java.io.InputStream;\r\n"
    + "import java.io.InputStreamReader;\r\n"
    + "import java.io.FileInputStream;\r\n"
    + "import java.io.StringReader;\r\n"
    + "import java.io.IOException;\r\n"
    + "\n"
    + "/**\r\n"
    + " * A class for parsing only :-)\r\n"
    + " */\r\n"
    + "public class MyRunner extends Applet implements Runnable {\r\n"
    + "/** default constructor */\r\n"
    + "public MyRunner() {\r\n"
    + "super();\r\n"
    + "}\r\n"
    + "\r\n"
    + "/**\r\n"
    + " * constructor taking name\r\n"
    + " *\r\n"
    + " * @param name the name\r\n"
    + " */\r\n"
    + "public MyRunner(String name) {\r\n"
    + "super(name);\r\n"
    + "}\r\n"
    + "\r\n"
    + "/**\r\n"
    + " * Getting the name\r\n"
    + " *\r\n"
    + " * @return the name\r\n"
    + " */\r\n"
    + "public String getName() {\r\n"
    + "return super.getName();\r\n"
    + "}\r\n"
    + "\r\n"
    + "/**\n"
    + " * Run method a defined in {@link java.lang.Runnable}.\r\n"
    + " */\r\n"
    + "public void run() throws Throwable {\r\n"
    + "Thread thread = Thread.currentThread();\r\n"
    + "long count = 0;\r\n"
    + "\r\n"
    + "while (Thread.currentThread() == this) {\r\n"
    + "count++;\r\n"
    + "_shifter >>= 1;\r\n"
    + "synchronized(this){\r\n"
    + "try {\r\n"
    + "wait((count % 100) + 10);\r\n"
    + "} catch (Exception ex) {\r\n"
    + "break;\r\n"
    + "} finally {\r\n"
    + "_shifter = 0;\r\n"
    + "}\r\n"
    + "}\r\n"
    + "}\r\n"
    + "}\r\n"
    + "\r\n"
    + "// Members\r\n"
    + "private long _shifter = 0;\r\n"
    + "}" };
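
  // The first text uses Unix line ends ("\n"), the second mostly Windows line
  // ends ("\r\n"), presumably to exercise both line-ending styles.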

  /**
   * The expected tokenizing results
   */
  protected static final int _expectedResults[][] = {
    { Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD,
      Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD,
      Token.SPECIAL_SEQUENCE, Token.KEYWORD,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.BLOCK_COMMENT,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.BLOCK_COMMENT,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.BLOCK_COMMENT,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.KEYWORD,
      Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.LINE_COMMENT, Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.STRING, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.LINE_COMMENT,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD,
      Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.EOF },
    { Token.LINE_COMMENT, // 0
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.LINE_COMMENT, // 12
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 13
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 22
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 31
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 40
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 49
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 58
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 67
      Token.BLOCK_COMMENT,
      Token.WHITESPACE, // 76
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 78
      Token.BLOCK_COMMENT,
      Token.WHITESPACE, // 94
      // "public MyRunner() {\r\n"
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, // 87
      Token.KEYWORD, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.BLOCK_COMMENT,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD,
      Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.BLOCK_COMMENT,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE, Token.NORMAL,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD,
      Token.SPECIAL_SEQUENCE, Token.KEYWORD,
      Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.NORMAL,
      Token.WHITESPACE, Token.NORMAL,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.LINE_COMMENT,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD,
      Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE,
      Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.EOF } };
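
  // Whitespace tokens appear in the expected sequences because the test
  // creates its TokenizerProperties with Flags.F_RETURN_WHITESPACES.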

  //---------------------------------------------------------------------------
  // Members
  //
  protected TokenizerProperties _properties = null;

  //---------------------------------------------------------------------------
  // inner classes
  //

  /**
   * Runnable that repeatedly parses the test texts with a tokenizer sharing
   * the <code>TokenizerProperties</code> of the enclosing test
   */
  class Runner implements Runnable {

    /**
     * Constructor
     */
    public Runner(TestMultithreadTokenizer parent, int startIndex, StringBuffer activity) {
      _parent = parent;
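      // All runners share the single TokenizerProperties instance of the
      // enclosing test; this shared state is what the multithreaded test
      // exercises.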
      _tokenizer = new StandardTokenizer(_parent._properties);
      _start = startIndex;
      _activity = activity;
    }

    /**
     * When an object implementing interface <code>Runnable</code> is used
     * to create a thread, starting the thread causes the object's
     * <code>run</code> method to be called in that separately executing
     * thread.
     * <p>
     * The general contract of the method <code>run</code> is that it may
     * take any action whatsoever.
     *
     * @see java.lang.Thread#run()
     */
    public void run() {
      Thread thread = Thread.currentThread();
      String name = thread.getName();
      int counter = _start;
      ArrayList tokens = new ArrayList();

      try {
        while (Thread.currentThread() == thread && !_stop) {
          long start = System.currentTimeMillis();
          int index = counter % _parent._testTexts.length;
          int[] expected = _parent._expectedResults[index];

          // parse text
          _tokenizer.setSource(new ReaderSource(new StringReader(_parent._testTexts[index])));
          tokens.clear();
          while (_tokenizer.hasMoreToken()) {
            tokens.add(_tokenizer.nextToken());
          }

          // verify results
          int typeIndex = 0;

          while (typeIndex < tokens.size() && typeIndex < expected.length) {
            Token token = (Token) tokens.get(typeIndex);
            int type = token.getType();

            _parent.assertTrue(
                "Index " + typeIndex + ": Expected type "
                    + Token.getTypeName(expected[typeIndex])
                    + ", found " + token,
                type == expected[typeIndex]);
            typeIndex++;
          }
          _parent.assertTrue(
              "Expected " + expected.length + " tokens, found " + tokens.size() + ".",
              expected.length == tokens.size());

          // increase counter
          counter++;

          // signal activity
          long value = Long.parseLong(_activity.toString());
          _activity.setLength(0);
          _activity.append(value + 1);
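
          // Note: this read-modify-write of the activity buffer is not atomic,
          // but only this runner ever writes to its own buffer; the main
          // thread merely reads it.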

          // pause a little bit
          synchronized (this) {
            try {
              wait(1);
            } catch (InterruptedException ex) {
            }
          }
        }
      } catch (Throwable t) {
        t.printStackTrace();
      } finally {
        _tokenizer.close();
      }
      System.out.println(name + ": exiting. Activity: " + _activity);
    }

    /**
     * Signal the thread to stop
     */
    public void stop() {
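      // Note: run() reads _stop without holding this lock; visibility relies
      // on the synchronized block around wait() executed in each loop pass.
      // A volatile flag would be the more conventional idiom.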
      synchronized (this) {
        _stop = true;
      }
    }

    //-------------------------------------------------------------------------
    // Members
    //
    private TestMultithreadTokenizer _parent = null;
    private Tokenizer _tokenizer = null;
    private int _start = 0;
    private boolean _stop = false;
    private StringBuffer _activity = null;
  }
}
|