/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.lang.text;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.textui.TestRunner;

import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.ObjectUtils;

/**
 * Unit tests for {@link StrTokenizer}.
 *
 * @author Matthew Inger
 */
public class StrTokenizerTest extends TestCase {

    private static final String CSV_SIMPLE_FIXTURE = "A,b,c";

    private static final String TSV_SIMPLE_FIXTURE = "A\tb\tc";

    public static void main(String[] args) {
        TestRunner.run(suite());
    }

    public static Test suite() {
        TestSuite suite = new TestSuite(StrTokenizerTest.class);
        suite.setName("StrTokenizer Tests");
        return suite;
    }

    /**
     * JUnit constructor.
     *
     * @param name the name of the test case
     */
    public StrTokenizerTest(String name) {
        super(name);
    }

    private void checkClone(StrTokenizer tokenizer) {
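        // Guard against the static factories handing out a shared instance:
        // the tokenizer under test must never be the same object as a freshly
        // obtained CSV or TSV instance.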
        assertFalse(StrTokenizer.getCSVInstance() == tokenizer);
        assertFalse(StrTokenizer.getTSVInstance() == tokenizer);
    }

    // -----------------------------------------------------------------------
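    // Tests 1-8 exercise combinations of the delimiter, quote, ignored and
    // trimmer matchers against input containing quoted sections, doubled
    // (escaped) quotes and empty tokens.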
    public void test1() {

        String input = "a;b;c;\"d;\"\"e\";f; ; ; ";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "b", "c", "d;\"e", "f",
                "", "", "", };

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);
        for (int i = 0; i < expected.length; i++) {
            assertTrue("token[" + i + "] was '" + tokens[i]
                    + "' but was expected to be '" + expected[i] + "'",
                    ObjectUtils.equals(expected[i], tokens[i]));
        }

    }

    public void test2() {

        String input = "a;b;c ;\"d;\"\"e\";f; ; ;";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "b", "c ", "d;\"e",
                "f", " ", " ", "", };

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);
        for (int i = 0; i < expected.length; i++) {
            assertTrue("token[" + i + "] was '" + tokens[i]
                    + "' but was expected to be '" + expected[i] + "'",
                    ObjectUtils.equals(expected[i], tokens[i]));
        }

    }

    public void test3() {

        String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "b", " c", "d;\"e",
                "f", " ", " ", "", };

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);
        for (int i = 0; i < expected.length; i++) {
            assertTrue("token[" + i + "] was '" + tokens[i]
                    + "' but was expected to be '" + expected[i] + "'",
                    ObjectUtils.equals(expected[i], tokens[i]));
        }

    }

    public void test4() {

        String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(true);
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "b", "c", "d;\"e", "f", };

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);
        for (int i = 0; i < expected.length; i++) {
            assertTrue("token[" + i + "] was '" + tokens[i]
                    + "' but was expected to be '" + expected[i] + "'",
                    ObjectUtils.equals(expected[i], tokens[i]));
        }

    }

    public void test5() {

        String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "b", "c", "d;\"e", "f",
                null, null, null, };

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);
        for (int i = 0; i < expected.length; i++) {
            assertTrue("token[" + i + "] was '" + tokens[i]
                    + "' but was expected to be '" + expected[i] + "'",
                    ObjectUtils.equals(expected[i], tokens[i]));
        }

    }

    public void test6() {

        String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        // tok.setEmptyTokenAsNull(true); // deliberately left unset: only the
        // token count and the iteration counts are asserted below, not the
        // token contents
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "b", " c", "d;\"e",
                "f", null, null, null, };

        int nextCount = 0;
        while (tok.hasNext()) {
            tok.next();
            nextCount++;
        }

        int prevCount = 0;
        while (tok.hasPrevious()) {
            tok.previous();
            prevCount++;
        }

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);

        assertTrue("could not cycle through entire token list"
                + " using the 'hasNext' and 'next' methods",
                nextCount == expected.length);

        assertTrue("could not cycle through entire token list"
                + " using the 'hasPrevious' and 'previous' methods",
                prevCount == expected.length);

    }

    public void test7() {

        String input = "a   b c \"d e\" f ";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
        tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
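        // With empty tokens retained, each extra space in the run between
        // "a" and "b" yields an empty token, as does the trailing space.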
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "", "", "b", "c",
                "d e", "f", "", };

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);
        for (int i = 0; i < expected.length; i++) {
            assertTrue("token[" + i + "] was '" + tokens[i]
                    + "' but was expected to be '" + expected[i] + "'",
                    ObjectUtils.equals(expected[i], tokens[i]));
        }

    }

    public void test8() {

        String input = "a   b c \"d e\" f ";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
        tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(true);
        String[] tokens = tok.getTokenArray();

        String[] expected = new String[] { "a", "b", "c", "d e", "f", };

        assertEquals(ArrayUtils.toString(tokens), expected.length,
                tokens.length);
        for (int i = 0; i < expected.length; i++) {
            assertTrue("token[" + i + "] was '" + tokens[i]
                    + "' but was expected to be '" + expected[i] + "'",
                    ObjectUtils.equals(expected[i], tokens[i]));
        }

    }

    public void testBasic1() {
        String input = "a b c";
        StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasic2() {
        String input = "a \nb\fc";
        StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasic3() {
        String input = "a \nb\u0001\fc";
        StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b\u0001", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasic4() {
        String input = "a \"b\" c";
        StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("\"b\"", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasic5() {
        String input = "a:b':c";
        StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        assertEquals("a", tok.next());
        assertEquals("b'", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicDelim1() {
        String input = "a:b:c";
        StrTokenizer tok = new StrTokenizer(input, ':');
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicDelim2() {
        String input = "a:b:c";
        StrTokenizer tok = new StrTokenizer(input, ',');
        assertEquals("a:b:c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicEmpty1() {
        String input = "a  b c";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setIgnoreEmptyTokens(false);
        assertEquals("a", tok.next());
        assertEquals("", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicEmpty2() {
        String input = "a  b c";
        StrTokenizer tok = new StrTokenizer(input);
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals(null, tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuoted1() {
        String input = "a 'b' c";
        StrTokenizer tok = new StrTokenizer(input, ' ', '\'');
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuoted2() {
        String input = "a:'b':";
        StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(null, tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuoted3() {
        String input = "a:'b''c'";
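        // A doubled quote character inside a quoted section is unescaped to a
        // single literal quote, so 'b''c' becomes b'c.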
        StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b'c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuoted4() {
        String input = "a: 'b' 'c' :d";
        StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b c", tok.next());
        assertEquals("d", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuoted5() {
        String input = "a: 'b'x'c' :d";
        StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bxc", tok.next());
        assertEquals("d", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuoted6() {
        String input = "a:'b'\"c':d";
        StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setQuoteMatcher(StrMatcher.quoteMatcher());
        assertEquals("a", tok.next());
        assertEquals("b\"c:d", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuoted7() {
        String input = "a:\"There's a reason here\":b";
        StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setQuoteMatcher(StrMatcher.quoteMatcher());
        assertEquals("a", tok.next());
        assertEquals("There's a reason here", tok.next());
        assertEquals("b", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicQuotedTrimmed1() {
        String input = "a: 'b' :";
        StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(null, tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicTrimmed1() {
        String input = "a: b : ";
        StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(null, tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicTrimmed2() {
        String input = "a: b :";
        StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setTrimmerMatcher(StrMatcher.stringMatcher(" "));
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(null, tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicIgnoreTrimmed1() {
        String input = "a: bIGNOREc : ";
        StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bc", tok.next());
        assertEquals(null, tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicIgnoreTrimmed2() {
        String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
        StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bc", tok.next());
        assertEquals(null, tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicIgnoreTrimmed3() {
        String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
        StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("  bc  ", tok.next());
        assertEquals("  ", tok.next());
        assertEquals(false, tok.hasNext());
    }

    public void testBasicIgnoreTrimmed4() {
        String input = "IGNOREaIGNORE: IGNORE 'bIGNOREc'IGNORE'd' IGNORE : IGNORE ";
        StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
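        // The ignored matcher is not applied inside quoted sections, so the
        // IGNORE inside 'bIGNOREc' survives while the unquoted ones are dropped.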
        assertEquals("a", tok.next());
        assertEquals("bIGNOREcd", tok.next());
        assertEquals(null, tok.next());
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testListArray() {
        String input = "a b c";
        StrTokenizer tok = new StrTokenizer(input);
        String[] array = tok.getTokenArray();
        List list = tok.getTokenList();

        assertEquals(Arrays.asList(array), list);
        assertEquals(3, list.size());
    }

    //-----------------------------------------------------------------------
    void testCSV(String data) {
        this.testXSVAbc(StrTokenizer.getCSVInstance(data));
        this.testXSVAbc(StrTokenizer.getCSVInstance(data.toCharArray()));
    }

    public void testCSVEmpty() {
        this.testEmpty(StrTokenizer.getCSVInstance());
        this.testEmpty(StrTokenizer.getCSVInstance(""));
    }

    public void testCSVSimple() {
        this.testCSV(CSV_SIMPLE_FIXTURE);
    }

    public void testCSVSimpleNeedsTrim() {
        this.testCSV(" " + CSV_SIMPLE_FIXTURE);
        this.testCSV(" \n\t " + CSV_SIMPLE_FIXTURE);
        this.testCSV(" \n " + CSV_SIMPLE_FIXTURE + "\n\n\r");
    }

    void testEmpty(StrTokenizer tokenizer) {
        this.checkClone(tokenizer);
        assertEquals(false, tokenizer.hasNext());
        assertEquals(false, tokenizer.hasPrevious());
        assertEquals(null, tokenizer.nextToken());
        assertEquals(0, tokenizer.size());
        try {
            tokenizer.next();
            fail();
        } catch (NoSuchElementException ex) {
        }
    }

    public void testGetContent() {
        String input = "a b c \"d e\" f ";
        StrTokenizer tok = new StrTokenizer(input);
        assertEquals(input, tok.getContent());

        tok = new StrTokenizer(input.toCharArray());
        assertEquals(input, tok.getContent());

        tok = new StrTokenizer();
        assertEquals(null, tok.getContent());
    }

    //-----------------------------------------------------------------------
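    // Every mutator returns the tokenizer itself, so configuration calls can
    // be chained fluently.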
    public void testChaining() {
        StrTokenizer tok = new StrTokenizer();
        assertEquals(tok, tok.reset());
        assertEquals(tok, tok.reset(""));
        assertEquals(tok, tok.reset(new char[0]));
        assertEquals(tok, tok.setDelimiterChar(' '));
        assertEquals(tok, tok.setDelimiterString(" "));
        assertEquals(tok, tok.setDelimiterMatcher(null));
        assertEquals(tok, tok.setQuoteChar(' '));
        assertEquals(tok, tok.setQuoteMatcher(null));
        assertEquals(tok, tok.setIgnoredChar(' '));
        assertEquals(tok, tok.setIgnoredMatcher(null));
        assertEquals(tok, tok.setTrimmerMatcher(null));
        assertEquals(tok, tok.setEmptyTokenAsNull(false));
        assertEquals(tok, tok.setIgnoreEmptyTokens(false));
    }

    /**
     * Tests that {@link StrTokenizer#clone()} catches
     * {@link CloneNotSupportedException} and returns <code>null</code>.
     */
    public void testCloneNotSupportedException() {
        Object notCloned = (new StrTokenizer() {
            Object cloneReset() throws CloneNotSupportedException {
                throw new CloneNotSupportedException("test");
            }
        }).clone();
        assertNull(notCloned);
    }

    public void testCloneNull() {
        StrTokenizer tokenizer = new StrTokenizer((char[]) null);
        // Start sanity check
        assertEquals(null, tokenizer.nextToken());
        tokenizer.reset();
        assertEquals(null, tokenizer.nextToken());
        // End sanity check
        StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
        tokenizer.reset();
        assertEquals(null, tokenizer.nextToken());
        assertEquals(null, clonedTokenizer.nextToken());
    }

    public void testCloneReset() {
        char[] input = new char[] { 'a' };
        StrTokenizer tokenizer = new StrTokenizer(input);
        // Start sanity check
        assertEquals("a", tokenizer.nextToken());
        tokenizer.reset();
        assertEquals("a", tokenizer.nextToken());
        // End sanity check
        StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
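        // The clone must hold its own copy of the input, so mutating the
        // original array must not affect the cloned tokenizer.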
        input[0] = 'b';
        tokenizer.reset();
        assertEquals("b", tokenizer.nextToken());
        assertEquals("a", clonedTokenizer.nextToken());
    }

    // -----------------------------------------------------------------------
    public void testConstructor_String() {
        StrTokenizer tok = new StrTokenizer("a b");
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer("");
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer((String) null);
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_String_char() {
        StrTokenizer tok = new StrTokenizer("a b", ' ');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(
                " ".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer("", ' ');
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer((String) null, ' ');
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_String_char_char() {
        StrTokenizer tok = new StrTokenizer("a b", ' ', '"');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(
                " ".toCharArray(), 0, 0, 1));
        assertEquals(1, tok.getQuoteMatcher().isMatch(
                "\"".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer("", ' ', '"');
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer((String) null, ' ', '"');
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_charArray() {
        StrTokenizer tok = new StrTokenizer("a b".toCharArray());
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer(new char[0]);
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer((char[]) null);
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_charArray_char() {
        StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(
                " ".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer(new char[0], ' ');
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer((char[]) null, ' ');
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_charArray_char_char() {
        StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ', '"');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(
                " ".toCharArray(), 0, 0, 1));
        assertEquals(1, tok.getQuoteMatcher().isMatch(
                "\"".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer(new char[0], ' ', '"');
        assertEquals(false, tok.hasNext());

        tok = new StrTokenizer((char[]) null, ' ', '"');
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testReset() {
        StrTokenizer tok = new StrTokenizer("a b c");
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());

        tok.reset();
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testReset_String() {
        StrTokenizer tok = new StrTokenizer("x x x");
        tok.reset("d e");
        assertEquals("d", tok.next());
        assertEquals("e", tok.next());
        assertEquals(false, tok.hasNext());

        tok.reset((String) null);
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testReset_charArray() {
        StrTokenizer tok = new StrTokenizer("x x x");

        char[] array = new char[] { 'a', ' ', 'c' };
        tok.reset(array);
        array[1] = 'b'; // test linked array
        assertEquals("abc", tok.next());
        assertEquals(false, tok.hasNext());

        tok.reset((char[]) null);
        assertEquals(false, tok.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testTSV() {
        this.testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE));
        this.testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE.toCharArray()));
    }

    public void testTSVEmpty() {
        this.testEmpty(StrTokenizer.getTSVInstance());
        this.testEmpty(StrTokenizer.getTSVInstance(""));
    }

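    // Shared assertion walking forwards and backwards over the three-token
    // fixture "A", "b", "c", checking the index book-keeping at each step.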
    void testXSVAbc(StrTokenizer tokenizer) {
        this.checkClone(tokenizer);
        assertEquals(-1, tokenizer.previousIndex());
        assertEquals(0, tokenizer.nextIndex());
        assertEquals(null, tokenizer.previousToken());
        assertEquals("A", tokenizer.nextToken());
        assertEquals(1, tokenizer.nextIndex());
        assertEquals("b", tokenizer.nextToken());
        assertEquals(2, tokenizer.nextIndex());
        assertEquals("c", tokenizer.nextToken());
        assertEquals(3, tokenizer.nextIndex());
        assertEquals(null, tokenizer.nextToken());
        assertEquals(3, tokenizer.nextIndex());
        assertEquals("c", tokenizer.previousToken());
        assertEquals(2, tokenizer.nextIndex());
        assertEquals("b", tokenizer.previousToken());
        assertEquals(1, tokenizer.nextIndex());
        assertEquals("A", tokenizer.previousToken());
        assertEquals(0, tokenizer.nextIndex());
        assertEquals(null, tokenizer.previousToken());
        assertEquals(0, tokenizer.nextIndex());
        assertEquals(-1, tokenizer.previousIndex());
        assertEquals(3, tokenizer.size());
    }

    public void testIteration() {
        StrTokenizer tkn = new StrTokenizer("a b c");
        assertEquals(false, tkn.hasPrevious());
        try {
            tkn.previous();
            fail();
        } catch (NoSuchElementException ex) {
        }
        assertEquals(true, tkn.hasNext());

        assertEquals("a", tkn.next());
        try {
            tkn.remove();
            fail();
        } catch (UnsupportedOperationException ex) {
        }
        try {
            tkn.set("x");
            fail();
        } catch (UnsupportedOperationException ex) {
        }
        try {
            tkn.add("y");
            fail();
        } catch (UnsupportedOperationException ex) {
        }
        assertEquals(true, tkn.hasPrevious());
        assertEquals(true, tkn.hasNext());

        assertEquals("b", tkn.next());
        assertEquals(true, tkn.hasPrevious());
        assertEquals(true, tkn.hasNext());

        assertEquals("c", tkn.next());
        assertEquals(true, tkn.hasPrevious());
        assertEquals(false, tkn.hasNext());

        try {
            tkn.next();
            fail();
        } catch (NoSuchElementException ex) {
        }
        assertEquals(true, tkn.hasPrevious());
        assertEquals(false, tkn.hasNext());
    }

    //-----------------------------------------------------------------------
    public void testTokenizeSubclassInputChange() {
        StrTokenizer tkn = new StrTokenizer("a b c d e") {
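            // Ignore the chars passed in and re-tokenize a 5-char window of
            // "w x y z" starting at offset 2, i.e. "x y z".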
            protected List tokenize(char[] chars, int offset, int count) {
                return super.tokenize("w x y z".toCharArray(), 2, 5);
            }
        };
        assertEquals("x", tkn.next());
        assertEquals("y", tkn.next());
    }

    //-----------------------------------------------------------------------
    public void testTokenizeSubclassOutputChange() {
        StrTokenizer tkn = new StrTokenizer("a b c") {
            protected List tokenize(char[] chars, int offset, int count) {
                List list = super.tokenize(chars, offset, count);
                Collections.reverse(list);
                return list;
            }
        };
        assertEquals("c", tkn.next());
        assertEquals("b", tkn.next());
        assertEquals("a", tkn.next());
    }

    //-----------------------------------------------------------------------
    public void testToString() {
        StrTokenizer tkn = new StrTokenizer("a b c d e");
        assertEquals("StrTokenizer[not tokenized yet]", tkn.toString());
        tkn.next();
        assertEquals("StrTokenizer[a, b, c, d, e]", tkn.toString());
    }

}